From d27a41f9cc9ca213136c098be772024ac39690b7 Mon Sep 17 00:00:00 2001 From: Xanthe Hoad <xanthe.hoad@cern.ch> Date: Mon, 23 Jan 2017 14:38:39 +0100 Subject: [PATCH] MaM v1.4.6 (TrigHLTMonitoring-00-07-38) 2016-01-23 Xanthe Hoad <xhoad@cern.ch> * doMaM_ExtractAndDumpConfigs is now usable * Few small bug fixes * MaM v1.4.6 * tagging TrigHLTMonitoring-00-07-38 2016-01-17 Xanthe Hoad <xhoad@cern.ch> * Fixes/updates for rel21 * Release can be determined in cmt and cmake releases * SVN package tags can be determined for cmt/cmake releases (git is still to do) * Updates to GUI, adding drop down lists, search * MAM v1.4.5 * tagging TrigHLTMonitoring-00-07-37 2016-12-08 Xanthe Hoad <xhoad@cern.ch> * Integrating developments from rel20 branch to trunk for rel21 2016-12-01 Xanthe Hoad <xhoad@cern.ch> * MAM: Defaults no longer need to be uploaded to the database * mam.make_patch_json() should be used to make new MCKs ... (Long ChangeLog diff - truncated) --- .../TrigHLTMonitoring/CMakeLists.txt | 11 +- .../TrigHLTMonitoring/cmt/requirements | 13 +- .../database/HLTMonitoring_topOptions.py | 61 - .../TrigHLTMonitoring/database/MaDQM.py | 87 -- .../TrigHLTMonitoring/database/MaDQM1.py | 153 --- .../database/MenuAwareDQMonitoring.cxx | 202 --- .../database/MenuAwareDQMonitoring.h | 87 -- .../database/TrigHLTMonitoring_entries.cxx | 18 - .../database/activeMCK_del.sql | 3 - .../database/activeMCK_schema.sql | 8 - .../TrigHLTMonitoring/database/del.sql | 22 - .../dictionaries/BJET_Cosmics_dictionary.py | 11 - .../dictionaries/BJET_Physics_dictionary.py | 11 - .../dictionaries/BJET_Standby_dictionary.py | 11 - .../dictionaries/BPHYS_Cosmics_dictionary.py | 11 - .../dictionaries/BPHYS_Physics_dictionary.py | 11 - .../dictionaries/BPHYS_Standby_dictionary.py | 11 - .../CALOESD_Cosmics_dictionary.py | 11 - .../CALOESD_Physics_dictionary.py | 11 - .../CALOESD_Standby_dictionary.py | 11 - .../CALORAW_Cosmics_dictionary.py | 11 - .../CALORAW_Physics_dictionary.py | 11 - 
.../CALORAW_Standby_dictionary.py | 11 - .../dictionaries/EGAMMA_Cosmics_dictionary.py | 13 - .../dictionaries/EGAMMA_Physics_dictionary.py | 13 - .../dictionaries/EGAMMA_Standby_dictionary.py | 13 - .../dictionaries/IDJPSI_Cosmics_dictionary.py | 11 - .../dictionaries/IDJPSI_Physics_dictionary.py | 11 - .../dictionaries/IDJPSI_Standby_dictionary.py | 11 - .../dictionaries/IDTRK_Cosmics_dictionary.py | 12 - .../dictionaries/IDTRK_Physics_dictionary.py | 12 - .../dictionaries/IDTRK_Standby_dictionary.py | 12 - .../dictionaries/JET_Cosmics_dictionary.py | 11 - .../dictionaries/JET_Physics_dictionary.py | 11 - .../dictionaries/JET_Standby_dictionary.py | 11 - .../dictionaries/MET_Cosmics_dictionary.py | 11 - .../dictionaries/MET_Physics_dictionary.py | 11 - .../dictionaries/MET_Standby_dictionary.py | 11 - .../MINBIAS_Cosmics_dictionary.py | 12 - .../MINBIAS_Physics_dictionary.py | 12 - .../MINBIAS_Standby_dictionary.py | 12 - .../dictionaries/MUON_Cosmics_dictionary.py | 11 - .../dictionaries/MUON_Physics_dictionary.py | 11 - .../dictionaries/MUON_Standby_dictionary.py | 11 - .../dictionaries/TAU_Cosmics_dictionary.py | 11 - .../dictionaries/TAU_Physics_dictionary.py | 11 - .../dictionaries/TAU_Standby_dictionary.py | 11 - .../TrigHLTMonitoring/database/env.sh | 3 - .../database/fill_activeMCK_table.py | 32 - .../database/fill_cool_madqm.py | 142 --- .../database/fill_smk2mck_table.py | 46 - .../database/insertInfoToDB.py | 351 ------ .../database/readConfigFromDB.py | 213 ---- .../database/readDictionary.py | 149 --- .../TrigHLTMonitoring/database/requirements | 47 - .../TrigHLTMonitoring/database/schema.sql | 40 - .../database/smk2mck_del.sql | 5 - .../database/smk2mck_schema.sql | 11 - .../TrigHLTMonitoring/database/tools.py | 1001 --------------- .../TrigHLTMonitoring/database/upload.py | 663 ---------- .../java/GUI/TrigMaMGUI.java | 1033 ++++++++------- .../java/GUI/TrigMaMGUI_TRIGGERDBREPR.java | 490 -------- .../{start_TrigMaMGUI.sh => TrigMaMGUI_P1.sh} | 15 
+- .../java/TrigMaMGUI_TRIGGERDBREPR.sh | 31 + .../java/start_TrigMaMGUI_TRIGGERDBREPR.sh | 43 - .../python/HLTMonTriggerList.py | 164 +-- .../python/MenuAwareMonitoring.py | 704 ++++++----- .../python/MenuAwareMonitoringStandalone.py | 1116 ++++++++++------- .../python/OracleInterface.py | 280 +++-- .../python/PackagesToInterrogate.py | 20 +- .../python/ToolInterrogator.py | 43 +- .../python/scripts/DumpDefaultMonConfig.sh | 23 + .../python/scripts/MCKtoCOOLmanual.py | 85 ++ .../python/scripts/RunProcessWithMonitor.py | 37 + .../share/HLTMonitoring_topOptions.py | 31 +- .../TrigHLTMonitoring/share/addMonTools.py | 268 ++-- .../TrigHLTMonitoring/src/IHLTMonTool.cxx | 9 +- 77 files changed, 2313 insertions(+), 5855 deletions(-) delete mode 100755 Trigger/TrigMonitoring/TrigHLTMonitoring/database/HLTMonitoring_topOptions.py delete mode 100755 Trigger/TrigMonitoring/TrigHLTMonitoring/database/MaDQM.py delete mode 100755 Trigger/TrigMonitoring/TrigHLTMonitoring/database/MaDQM1.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/MenuAwareDQMonitoring.cxx delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/MenuAwareDQMonitoring.h delete mode 100755 Trigger/TrigMonitoring/TrigHLTMonitoring/database/TrigHLTMonitoring_entries.cxx delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/activeMCK_del.sql delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/activeMCK_schema.sql delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/del.sql delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BJET_Cosmics_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BJET_Physics_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BJET_Standby_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BPHYS_Cosmics_dictionary.py delete mode 100644 
Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BPHYS_Physics_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BPHYS_Standby_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALOESD_Cosmics_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALOESD_Physics_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALOESD_Standby_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALORAW_Cosmics_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALORAW_Physics_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALORAW_Standby_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/EGAMMA_Cosmics_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/EGAMMA_Physics_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/EGAMMA_Standby_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDJPSI_Cosmics_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDJPSI_Physics_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDJPSI_Standby_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Cosmics_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Physics_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Standby_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/JET_Cosmics_dictionary.py delete mode 100644 
Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/JET_Physics_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/JET_Standby_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Cosmics_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Physics_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Standby_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MINBIAS_Cosmics_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MINBIAS_Physics_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MINBIAS_Standby_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MUON_Cosmics_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MUON_Physics_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MUON_Standby_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/TAU_Cosmics_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/TAU_Physics_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/TAU_Standby_dictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/env.sh delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/fill_activeMCK_table.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/fill_cool_madqm.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/fill_smk2mck_table.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/insertInfoToDB.py delete mode 100644 
Trigger/TrigMonitoring/TrigHLTMonitoring/database/readConfigFromDB.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/readDictionary.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/requirements delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/schema.sql delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/smk2mck_del.sql delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/smk2mck_schema.sql delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/tools.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/database/upload.py delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/java/GUI/TrigMaMGUI_TRIGGERDBREPR.java rename Trigger/TrigMonitoring/TrigHLTMonitoring/java/{start_TrigMaMGUI.sh => TrigMaMGUI_P1.sh} (60%) create mode 100755 Trigger/TrigMonitoring/TrigHLTMonitoring/java/TrigMaMGUI_TRIGGERDBREPR.sh delete mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/java/start_TrigMaMGUI_TRIGGERDBREPR.sh create mode 100755 Trigger/TrigMonitoring/TrigHLTMonitoring/python/scripts/DumpDefaultMonConfig.sh create mode 100644 Trigger/TrigMonitoring/TrigHLTMonitoring/python/scripts/MCKtoCOOLmanual.py create mode 100755 Trigger/TrigMonitoring/TrigHLTMonitoring/python/scripts/RunProcessWithMonitor.py diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/CMakeLists.txt b/Trigger/TrigMonitoring/TrigHLTMonitoring/CMakeLists.txt index bf3797555bcc..7c2fce0df150 100644 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/CMakeLists.txt +++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/CMakeLists.txt @@ -14,11 +14,9 @@ atlas_depends_on_subdirs( PUBLIC LumiBlock/LumiCalc Trigger/TrigAnalysis/TrigDecisionTool Trigger/TrigConfiguration/TrigConfigSvc - Trigger/TrigConfiguration/TrigConfInterfaces Trigger/TrigEvent/TrigSteeringEvent PRIVATE Control/AthenaKernel - Database/AthenaPOOL/AthenaPoolUtilities Event/EventInfo Event/xAOD/xAODTrigger ) @@ -33,14 +31,17 @@ 
atlas_add_library( TrigHLTMonitoringLib PUBLIC_HEADERS TrigHLTMonitoring PRIVATE_INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} ${Boost_INCLUDE_DIRS} ${CORAL_INCLUDE_DIRS} LINK_LIBRARIES xAODLuminosity GaudiKernel TrigSteeringEvent AthenaMonitoringLib StoreGateLib SGtests LumiBlockCoolQuery TrigDecisionToolLib TrigConfigSvcLib - PRIVATE_LINK_LIBRARIES ${ROOT_LIBRARIES} ${Boost_LIBRARIES} ${CORAL_LIBRARIES} AthenaKernel AthenaPoolUtilities EventInfo xAODTrigger ) + PRIVATE_LINK_LIBRARIES ${ROOT_LIBRARIES} ${Boost_LIBRARIES} ${CORAL_LIBRARIES} AthenaKernel EventInfo xAODTrigger ) atlas_add_component( TrigHLTMonitoring src/components/*.cxx INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} ${Boost_INCLUDE_DIRS} ${CORAL_INCLUDE_DIRS} - LINK_LIBRARIES ${ROOT_LIBRARIES} ${Boost_LIBRARIES} ${CORAL_LIBRARIES} AthenaMonitoringLib StoreGateLib SGtests xAODLuminosity GaudiKernel LumiBlockCoolQuery TrigDecisionToolLib TrigConfigSvcLib TrigSteeringEvent AthenaKernel AthenaPoolUtilities EventInfo xAODTrigger TrigHLTMonitoringLib ) + LINK_LIBRARIES ${ROOT_LIBRARIES} ${Boost_LIBRARIES} ${CORAL_LIBRARIES} AthenaMonitoringLib StoreGateLib SGtests xAODLuminosity GaudiKernel LumiBlockCoolQuery TrigDecisionToolLib TrigConfigSvcLib TrigSteeringEvent AthenaKernel EventInfo xAODTrigger TrigHLTMonitoringLib ) # Install files from the package: atlas_install_python_modules( python/*.py ) atlas_install_joboptions( share/*.py ) - +atlas_install_scripts( python/scripts/*.py python/scripts/*.sh java/TrigMaMGUI_TRIGGERDBREPR.sh ) +atlas_install_generic( java/GUI/TrigMaMGUI.java + DESTINATION java/ + TYPEANAME JavaGUI ) diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/cmt/requirements b/Trigger/TrigMonitoring/TrigHLTMonitoring/cmt/requirements index b7c9ed67f684..e1b9ffc30674 100644 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/cmt/requirements +++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/cmt/requirements @@ -17,19 +17,17 @@ use TrigSteeringEvent TrigSteeringEvent-* Trigger/TrigEvent use TrigDecisionTool 
TrigDecisionTool-* Trigger/TrigAnalysis #use TrigDecision TrigDecision-* Trigger/TrigEvent use TrigConfigSvc TrigConfigSvc-* Trigger/TrigConfiguration -use TrigConfInterfaces TrigConfInterfaces-* Trigger/TrigConfiguration +use TrigConfInterfaces TrigConfInterfaces-* Trigger/TrigConfiguration -use LumiCalc LumiCalc-* LumiBlock -use xAODLuminosity xAODLuminosity-* Event/xAOD +use LumiCalc LumiCalc-* LumiBlock +use xAODLuminosity xAODLuminosity-* Event/xAOD private use AthenaKernel AthenaKernel-* Control use AtlasBoost AtlasBoost-* External use AtlasROOT AtlasROOT-* External use EventInfo EventInfo-* Event -use AtlasCORAL AtlasCORAL-* External -use AthenaPoolUtilities AthenaPoolUtilities-* Database/AthenaPOOL -use xAODTrigger xAODTrigger-* Event/xAOD +use xAODTrigger xAODTrigger-* Event/xAOD end_private @@ -40,3 +38,6 @@ end_private apply_pattern declare_joboptions files="*.py" apply_pattern declare_python_modules files="*.py" apply_pattern dual_use_library files=*.cxx +apply_pattern declare_scripts files="../python/scripts/*.py ../python/scripts/*.sh ../java/TrigMaMGUI_TRIGGERDBREPR.sh" +apply_pattern generic_declare_for_copy kind=javagui prefix="share/java" \ + files="../java/GUI/TrigMaMGUI.java" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/HLTMonitoring_topOptions.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/HLTMonitoring_topOptions.py deleted file mode 100755 index dd84676def62..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/HLTMonitoring_topOptions.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -print "hello from TrigHLTMonitoringJobOptions_forRecExCommission.py" - -######## flags ########### - -if not 'HLTMonFlags' in dir(): - from TrigHLTMonitoring.HLTMonFlags import HLTMonFlags - -if not 'DQMonFlags' in dir(): - from AthenaMonitoring.DQMonFlags import DQMonFlags - - ########## control step assignment ######### - -if DQMonFlags.monManEnvironment == 
'tier0Raw': - # we are in RAW->ESD step - # run all tools *except* the following (these are run in ESD->AOD) - print 'HLTMonitoring_topOptions.py: environment is tier0Raw' - HLTMonFlags.doGeneral = False - HLTMonFlags.doBjet = False - HLTMonFlags.doBphys = False - HLTMonFlags.doMET = False - HLTMonFlags.doJet = False - HLTMonFlags.doEgamma = False - HLTMonFlags.doMuon = False - HLTMonFlags.doIDtrk = False - HLTMonFlags.doTau = False - HLTMonFlags.doMinBias = False - HLTMonFlags.doDump = False - HLTMonFlags.doOfflineTauTTP = False - HLTMonFlags.doIDJpsiMon = False - #HLTMonFlags.doCalo = True # tk -elif DQMonFlags.monManEnvironment == 'tier0ESD': - # we are in ESD->AOD step - # run all tools *except* the following (these are run in RAW->ESD) - print 'HLTMonitoring_topOptions.py: environment is tier0ESD' - - # here we decide at all if we want to do monitoring for these slices - HLTMonFlags.doBjet = False - HLTMonFlags.doBphys = False - HLTMonFlags.doMET = False - HLTMonFlags.doJet = True - HLTMonFlags.doEgamma = True - HLTMonFlags.doMuon = True - HLTMonFlags.doIDtrk = False - HLTMonFlags.doTau = True - HLTMonFlags.doMinBias = True - #HLTMonFlags.doDump = False # don't change here, not implemented yet - #HLTMonFlags.doOfflineTauTTP = False # don't change here, not implemented yet - HLTMonFlags.doIDJpsiMon = True - HLTMonFlags.doCalo = True - - # HLTMonFlags.doCalo = False -else : - print 'HLTMonitoring_topOptions.py: environment is neither tier0Raw nor tier0ESD' - print 'HLTMonitoring_topOptions.py: switching all tools off...' 
- HLTMonFlags.doGeneral = False - HLTMonFlags.doMonTier0 = False - -include( "TrigHLTMonitoring/addMonTools.py" ) -include("TrigHLTMonitoring/MaDQM.py") diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/MaDQM.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/MaDQM.py deleted file mode 100755 index ade3d256df3a..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/MaDQM.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -print "hello from MaDQM.py" - -def getAndSetConfig(dbconnection, MCK, runtype, slice_name, reco_step, *vargs, **kwargs): - # here we check which entry there is in the DB for this slice - config_thisFunction = readSliceConfigFromDB(dbconnection, MCK, runtype, slice_name, reco_step) - # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way - if config_thisFunction != None: - for (toolname, toolconfig) in config_thisFunction['Config']: - # Ben: if the tool is not loaded into ToolSvc then we best load it now! 
- if hasattr(ToolSvc,toolname) == False: - exec "from %s import %s" % (config_thisFunction["PkgName"],config_thisFunction["CreatorName"]) - exec "%s()" % (config_thisFunction["CreatorName"]) - # modify defaults according to config dictionary - tool = getattr(ToolSvc,toolname) - for confattr,confvalue in toolconfig.items(): - tool.__setattr__(confattr, confvalue) - -# read monitoring configureation from the database -# calling readSliceConfigFromDB(dbconnection, MCK, runtype, slicename, recostep) returns monitoring configuration -# dbconnection is 'oracle' -# MCK can be: any MCK number or -# 'ACTIVE_KEY' or -# 'LAST_MCK' - -# runtype can be 'Physics', 'Standby' or 'Cosmics' -# slicename: 'TAU','EGAMMA','JET','BJET','BPHYS','MET','MINBIAS','MUON','IDTRK','IDJPSI', 'CALORAW', 'CALOESD' -# recostep: 'RAW' or 'ESD' - -include("TrigHLTMonitoring/readConfigFromDB.py") - -# if HLTMonFlags.doCalo and DQMonFlags.monManEnvironment == 'tier0Raw': -# # here we check which entry there is in the DB for this slice -# config_caloraw = readSliceConfigFromDB('oracle','ACTIVE_KEY','Physics', 'CALORAW' , 'RAW') -# # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way -# if config_caloraw != None: -# for (toolname, toolconfig) in config_caloraw['Config']: -# #print "BENtest1:" -# #print "ToolSvc._Configurable__children",ToolSvc._Configurable__children -# #print "config_caloraw[\"PkgName\"]",config_caloraw["PkgName"] -# #print "config_caloraw[\"CreatorName\"]",config_caloraw["CreatorName"] -# exec "from %s import %s" % (config_caloraw["PkgName"],config_caloraw["CreatorName"]) -# exec "%s()" % (config_caloraw["CreatorName"]) -# #print "BENtest2:" -# #print "ToolSvc._Configurable__children",ToolSvc._Configurable__children -# #print "hasattr(ToolSvc,\"HLTCalo\")",hasattr(ToolSvc,"HLTCalo") -# # modify defaults according to config dictionary -# tool = getattr(ToolSvc,toolname) -# for confattr,confvalue in toolconfig.items(): -# 
tool.__setattr__(confattr, confvalue) - -if HLTMonFlags.doCalo and DQMonFlags.monManEnvironment == 'tier0Raw': - getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'CALORAW' , 'RAW') - -if HLTMonFlags.doCalo and DQMonFlags.monManEnvironment == 'tier0ESD': - getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'CALOESD' , 'ESD') - -if HLTMonFlags.doTau: - getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'TAU' , 'ESD') - -if HLTMonFlags.doBjet: - getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'BJET' , 'ESD') - -if HLTMonFlags.doBphys: - getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'BPHYS' , 'ESD') - -if HLTMonFlags.doMET: - getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'MET' , 'ESD') - -if HLTMonFlags.doJet: - getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'JET' , 'ESD') - -if HLTMonFlags.doEgamma: - getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'EGAMMA' , 'ESD') - -if HLTMonFlags.doMuon: - getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'MUON' , 'ESD') - -if HLTMonFlags.doIDtrk: - getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'IDTRK' , 'ESD') - -if HLTMonFlags.doMinBias: - getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'MINBIAS' , 'ESD') - -if HLTMonFlags.doIDJpsiMon: - getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'IDJPSI' , 'ESD') diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/MaDQM1.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/MaDQM1.py deleted file mode 100755 index 69c340ee7012..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/MaDQM1.py +++ /dev/null @@ -1,153 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -print "hello from MaDQM.py" - -# read monitoring configureation from the database -# calling readSliceConfigFromDB(dbconnection, MCK, runtype, slicename, recostep) returns monitoring configuration -# dbconnection is 'oracle' -# MCK can be: any MCK number or -# 'ACTIVE_KEY' or -# 'LAST_MCK' - -# runtype can be 'Physics', 'Standby' or 'Cosmics' -# slicename: 
'TAU','EGAMMA','JET','BJET','BPHYS','MET','MINBIAS','MUON','IDTRK','IDJPSI', 'CALORAW', 'CALOESD' -# recostep: 'RAW' or 'ESD' - -include("TrigHLTMonitoring/readConfigFromDB.py") - -def configureMonitoring(MCK_number): - - if HLTMonFlags.doCalo and DQMonFlags.monManEnvironment == 'tier0Raw': - # here we check which entry there is in the DB for this slice - #config_caloraw = readSliceConfigFromDB('oracle','ACTIVE_KEY','Physics', 'CALORAW' , 'RAW') - config_caloraw = readSliceConfigFromDB('oracle',MCK_number,'Physics', 'CALORAW' , 'RAW') - # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way - if config_caloraw != None: - for (toolname, toolconfig) in config_caloraw['Config']: - # modify defaults according to config dictionary - tool = getattr(ToolSvc,toolname) - for confattr,confvalue in toolconfig.items(): - tool.__setattr__(confattr, confvalue) - - - if HLTMonFlags.doCalo and DQMonFlags.monManEnvironment == 'tier0ESD': - # here we check which entry there is in the DB for this slice - config_caloesd = readSliceConfigFromDB('oracle',MCK_number,'Physics', 'CALOESD' , 'ESD') - # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way - if config_caloesd != None: - for (toolname, toolconfig) in config_caloesd['Config']: - # modify defaults according to config dictionary - tool = getattr(ToolSvc,toolname) - for confattr,confvalue in toolconfig.items(): - tool.__setattr__(confattr, confvalue) - - if HLTMonFlags.doTau: - # here we check which entry there is in the DB for this slice - config_tau = readSliceConfigFromDB('oracle',MCK_number,'Physics', 'TAU' , 'ESD') - # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way - if config_tau != None: - for (toolname, toolconfig) in config_tau['Config']: - # modify defaults according to config dictionary - tool = getattr(ToolSvc,toolname) - for 
confattr,confvalue in toolconfig.items(): - tool.__setattr__(confattr, confvalue) - - if HLTMonFlags.doBjet: - # here we check which entry there is in the DB for this slice - config_bjet = readSliceConfigFromDB('oracle',MCK_number,'Physics', 'BJET' , 'ESD') - # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way - if config_bjet != None: - for (toolname, toolconfig) in config_bjet['Config']: - # modify defaults according to config dictionary - tool = getattr(ToolSvc,toolname) - for confattr,confvalue in toolconfig.items(): - tool.__setattr__(confattr, confvalue) - - if HLTMonFlags.doBphys: - # here we check which entry there is in the DB for this slice - config_bphys = readSliceConfigFromDB('oracle',MCK_number,'Physics', 'BPHYS' , 'ESD') - # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way - if config_bphys != None: - for (toolname, toolconfig) in config_bphys['Config']: - # modify defaults according to config dictionary - tool = getattr(ToolSvc,toolname) - for confattr,confvalue in toolconfig.items(): - tool.__setattr__(confattr, confvalue) - - if HLTMonFlags.doMET: - # here we check which entry there is in the DB for this slice - config_met = readSliceConfigFromDB('oracle',MCK_number,'Physics', 'MET' , 'ESD') - # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way - if config_met != None: - for (toolname, toolconfig) in config_met['Config']: - # modify defaults according to config dictionary - tool = getattr(ToolSvc,toolname) - for confattr,confvalue in toolconfig.items(): - tool.__setattr__(confattr, confvalue) - - if HLTMonFlags.doJet: - # here we check which entry there is in the DB for this slice - config_jet = readSliceConfigFromDB('oracle',MCK_number,'Physics', 'JET' , 'ESD') - # if entry is not None then we take monitoring config from DB, else it is taken from the release 
in a standard way - if config_jet != None: - for (toolname, toolconfig) in config_jet['Config']: - # modify defaults according to config dictionary - tool = getattr(ToolSvc,toolname) - for confattr,confvalue in toolconfig.items(): - tool.__setattr__(confattr, confvalue) - - if HLTMonFlags.doEgamma: - # here we check which entry there is in the DB for this slice - config_egamma = readSliceConfigFromDB('oracle',MCK_number,'Physics', 'EGAMMA' , 'ESD') - # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way - if config_egamma != None: - for (toolname, toolconfig) in config_egamma['Config']: - # modify defaults according to config dictionary - tool = getattr(ToolSvc,toolname) - for confattr,confvalue in toolconfig.items(): - tool.__setattr__(confattr, confvalue) - - if HLTMonFlags.doMuon: - # here we check which entry there is in the DB for this slice - config_muon = readSliceConfigFromDB('oracle',MCK_number,'Physics', 'MUON' , 'ESD') - # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way - if config_muon != None: - for (toolname, toolconfig) in config_muon['Config']: - # modify defaults according to config dictionary - tool = getattr(ToolSvc,toolname) - for confattr,confvalue in toolconfig.items(): - tool.__setattr__(confattr, confvalue) - - if HLTMonFlags.doIDtrk: - # here we check which entry there is in the DB for this slice - config_idtrk = readSliceConfigFromDB('oracle',MCK_number,'Physics', 'IDTRK' , 'ESD') - # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way - if config_idtrk != None: - for (toolname, toolconfig) in config_idtrk['Config']: - # modify defaults according to config dictionary - tool = getattr(ToolSvc,toolname) - for confattr,confvalue in toolconfig.items(): - tool.__setattr__(confattr, confvalue) - - if HLTMonFlags.doMinBias: - # here we check which entry there is 
in the DB for this slice - config_minbias = readSliceConfigFromDB('oracle',MCK_number,'Physics', 'MINBIAS' , 'ESD') - # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way - if config_minbias != None: - for (toolname, toolconfig) in config_minbias['Config']: - # modify defaults according to config dictionary - tool = getattr(ToolSvc,toolname) - for confattr,confvalue in toolconfig.items(): - tool.__setattr__(confattr, confvalue) - - if HLTMonFlags.doIDJpsiMon: - # here we check which entry there is in the DB for this slice - config_idjpsi = readSliceConfigFromDB('oracle',MCK_number,'Physics', 'IDJPSI' , 'ESD') - # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way - if config_idjpsi != None: - for (toolname, toolconfig) in config_idjpsi['Config']: - # modify defaults according to config dictionary - tool = getattr(ToolSvc,toolname) - for confattr,confvalue in toolconfig.items(): - tool.__setattr__(confattr, confvalue) - diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/MenuAwareDQMonitoring.cxx b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/MenuAwareDQMonitoring.cxx deleted file mode 100644 index 8c615765f73e..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/MenuAwareDQMonitoring.cxx +++ /dev/null @@ -1,202 +0,0 @@ -///////////////////////// -*- C++ -*- ///////////////////////////// - -/* - Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration -*/ - -// MenuAwareDQMonitoring.cxx -// Implementation file for class MenuAwareDQMonitoring -// Author: Tatsiana Klimkovich <tklimk@cern.ch> -/////////////////////////////////////////////////////////////////// - -// TrigHLTMonitoring includes -#include "TrigHLTMonitoring/MenuAwareDQMonitoring.h" - -// STL includes - -// FrameWork includes -#include "GaudiKernel/Property.h" - -// Athena includes -#include "StoreGate/StoreGate.h" -#include 
"AthenaKernel/IAthenaOutputStreamTool.h" - -//// Gaudi includes -//#include "GaudiKernel/MsgStream.h" -//#include "GaudiKernel/IIncidentSvc.h" -//#include "GaudiKernel/GaudiException.h" -//#include "GaudiKernel/IToolSvc.h" -// -//// Event Info -//#include "EventInfo/EventIncident.h" -//#include "EventInfo/EventInfo.h" -//#include "EventInfo/EventID.h" -//#include "EventInfo/EventType.h" - -//#include "CoolKernel/Record.h" - -// AttributeList -#include "CoralBase/Attribute.h" -#include "CoralBase/Blob.h" -#include "CoralBase/AttributeListSpecification.h" -#include "AthenaPoolUtilities/AthenaAttributeList.h" -#include "AthenaPoolUtilities/CondAttrListCollection.h" - -// for online testing -#include <sys/ipc.h> -#include <sys/msg.h> - -#include <stdint.h> - - -/////////////////////////////////////////////////////////////////// -// Public methods: -/////////////////////////////////////////////////////////////////// - -// Constructors -//////////////// -MenuAwareDQMonitoring::MenuAwareDQMonitoring( const std::string& name, - ISvcLocator* pSvcLocator ) : - ::AthAlgorithm( name, pSvcLocator ), - m_MCK("0") -{ - // - // Property declaration - // - //declareProperty( "Property", m_nProperty ); -} - -// Destructor -/////////////// -MenuAwareDQMonitoring::~MenuAwareDQMonitoring() -{} - -// Athena Algorithm's Hooks -//////////////////////////// -StatusCode MenuAwareDQMonitoring::initialize() -{ - ATH_MSG_INFO ("Initializing " << name() << "..."); - - myfile.open ("example.txt"); - - //StatusCode sc; - MsgStream log(msgSvc(), name()); - log <<MSG::DEBUG <<"in initialize()" <<endmsg; - - // Storegate - StatusCode sc = service("StoreGateSvc", m_sgSvc); - if (sc.isFailure()) { - log << MSG::ERROR << "Unable to get the StoreGateSvc" << endmsg; - return sc; - } - - // locate the conditions store ptr to it. 
- sc = service("DetectorStore", m_detStore); - if (!sc.isSuccess() || 0 == m_detStore) { - log <<MSG::ERROR <<"Could not find DetStore" <<endmsg; - return StatusCode::FAILURE; - } - - return StatusCode::SUCCESS; -} - -StatusCode MenuAwareDQMonitoring::finalize() -{ - ATH_MSG_INFO ("Finalizing " << name() << "..."); - //myfile.close(); - return StatusCode::SUCCESS; -} - -StatusCode MenuAwareDQMonitoring::execute() -{ - ATH_MSG_DEBUG ("Executing " << name() << "..."); - ATH_MSG_INFO ("This is the message of Tanya!"); - - // here we are going to read MCK number according to IOV - - StatusCode sc; - - //const AthenaAttributeList* attrList = 0; - const CondAttrListCollection* attrListColl = 0; - //const CondAttrListCollection* attrListColl = 0; - //const CondAttrListCollection* attrListColl = 0; - - // AttrList - sc = m_detStore->retrieve(attrListColl, "/TRIGGER/HLT/MenuAwareDQMonitoring5"); - if (sc.isFailure()) { - ATH_MSG_ERROR ("Could not retrieve MenuAwareDQMonitoring5"); - //log <<MSG::ERROR <<"Could not retrieve IOVDbTestAttrList" <<endmsg; - // Using COOL, is failure - return( StatusCode::FAILURE); - } - - std::ostringstream attrStr2; - - // Loop over collection - CondAttrListCollection::const_iterator first = attrListColl->begin(); - CondAttrListCollection::const_iterator last = attrListColl->end(); - for (; first != last; ++first) { - std::ostringstream attrStr1; - (*first).second.toOutputStream( attrStr1 ); - ATH_MSG_DEBUG ("ChanNum " << (*first).first); - // print out the name if present - if (attrListColl->name_size()>0) { - CondAttrListCollection::name_const_iterator nitr=attrListColl->chanNamePair((*first).first); - if (nitr!=attrListColl->name_end()) - ATH_MSG_DEBUG( " name " << nitr->second); - } - ATH_MSG_DEBUG(" Attribute list " << attrStr1.str() ); - - const coral::AttributeList& attrList = (*first).second; - - int mck = attrList["MCKdata"].data<uint32_t>(); - - myfile << mck << " \n"; - myfile.close(); - // Print out range if it exits - 
CondAttrListCollection::ChanNum chanNum = (*first).first; - CondAttrListCollection::iov_const_iterator iovIt = attrListColl->chanIOVPair(chanNum); - if (iovIt != attrListColl->iov_end()) { - const IOVRange& range = (*iovIt).second; - if(range.start().isTimestamp()) { - ATH_MSG_DEBUG ("Range timestamp : since " << range.start().timestamp() - << " till " << range.stop().timestamp() ); - } - else { - ATH_MSG_DEBUG("Range R/E : since " << range.start().run() << " " - << range.start().event() - << " till " << range.stop().run() << " " - << range.stop().event()); - } - } - else { - ATH_MSG_DEBUG("No range found "); - } - } - - - - return StatusCode::SUCCESS; -} - -/////////////////////////////////////////////////////////////////// -// Const methods: -/////////////////////////////////////////////////////////////////// - -/////////////////////////////////////////////////////////////////// -// Non-const methods: -/////////////////////////////////////////////////////////////////// - -/////////////////////////////////////////////////////////////////// -// Protected methods: -/////////////////////////////////////////////////////////////////// - -/////////////////////////////////////////////////////////////////// -// Const methods: -/////////////////////////////////////////////////////////////////// - -/////////////////////////////////////////////////////////////////// -// Non-const methods: -/////////////////////////////////////////////////////////////////// - - diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/MenuAwareDQMonitoring.h b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/MenuAwareDQMonitoring.h deleted file mode 100644 index c1cb2bd539a5..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/MenuAwareDQMonitoring.h +++ /dev/null @@ -1,87 +0,0 @@ -///////////////////////// -*- C++ -*- ///////////////////////////// - -/* - Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration -*/ - -// MenuAwareDQMonitoring.h -// 
Header file for class MenuAwareDQMonitoring -// Author: Tatsiana Klimkovich <tklimk@cern.ch> -/////////////////////////////////////////////////////////////////// -#ifndef TRIGHLTMONITORING_MENUAWAREDQMONITORING_H -#define TRIGHLTMONITORING_MENUAWAREDQMONITORING_H 1 - -#include <iostream> -#include <fstream> - -// STL includes -#include <string> - -// FrameWork includes -#include "AthenaBaseComps/AthAlgorithm.h" - -#include "StoreGate/DataHandle.h" - -class StoreGateSvc; - -class MenuAwareDQMonitoring - : public ::AthAlgorithm -{ - - /////////////////////////////////////////////////////////////////// - // Public methods: - /////////////////////////////////////////////////////////////////// - public: - - // Copy constructor: - - /// Constructor with parameters: - MenuAwareDQMonitoring( const std::string& name, ISvcLocator* pSvcLocator ); - - /// Destructor: - virtual ~MenuAwareDQMonitoring(); - - // Assignment operator: - //MenuAwareDQMonitoring &operator=(const MenuAwareDQMonitoring &alg); - - // Athena algorithm's Hooks - virtual StatusCode initialize(); - virtual StatusCode execute(); - virtual StatusCode finalize(); - - /////////////////////////////////////////////////////////////////// - // Const methods: - /////////////////////////////////////////////////////////////////// - - /////////////////////////////////////////////////////////////////// - // Non-const methods: - /////////////////////////////////////////////////////////////////// - - /////////////////////////////////////////////////////////////////// - // Private data: - /////////////////////////////////////////////////////////////////// - private: - - StoreGateSvc* m_sgSvc; - StoreGateSvc* m_detStore; - StoreGateSvc* m_metadataStore; - std::string m_MCK; - std::ofstream myfile; - - /// Default constructor: - MenuAwareDQMonitoring(); - - /// Containers - - -}; - -// I/O operators -////////////////////// - -/////////////////////////////////////////////////////////////////// -// Inline methods: 
-/////////////////////////////////////////////////////////////////// - - -#endif //> !TRIGHLTMONITORING_MENUAWAREDQMONITORING_H diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/TrigHLTMonitoring_entries.cxx b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/TrigHLTMonitoring_entries.cxx deleted file mode 100755 index 092b95773ada..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/TrigHLTMonitoring_entries.cxx +++ /dev/null @@ -1,18 +0,0 @@ -/* - Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration -*/ - -#include "TrigHLTMonitoring/IHLTMonTool.h" -#include "TrigHLTMonitoring/HLTMonTool.h" -#include "TrigHLTMonitoring/MenuAwareDQMonitoring.h" - -#include "GaudiKernel/DeclareFactoryEntries.h" - -DECLARE_TOOL_FACTORY(HLTMonTool ) - -DECLARE_FACTORY_ENTRIES(TrigHLTMonitoring) { -DECLARE_ALGTOOL( HLTMonTool ) -} - -DECLARE_ALGORITHM_FACTORY(MenuAwareDQMonitoring) - diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/activeMCK_del.sql b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/activeMCK_del.sql deleted file mode 100644 index 63f3595af8b8..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/activeMCK_del.sql +++ /dev/null @@ -1,3 +0,0 @@ -DROP INDEX activeMCK_ind; -ALTER TABLE activeMCK_table DROP CONSTRAINT activeMCK_pk; -DROP TABLE activeMCK_table; diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/activeMCK_schema.sql b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/activeMCK_schema.sql deleted file mode 100644 index e86f23772570..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/activeMCK_schema.sql +++ /dev/null @@ -1,8 +0,0 @@ -CREATE TABLE activeMCK_table ( - activeMCK_id NUMBER(10), - activeMCK_name VARCHAR2(200), - activeMCK NUMBER(30), - CONSTRAINT activeMCK_pk PRIMARY KEY(activeMCK_id) -); -CREATE INDEX activeMCK_ind ON activeMCK_table(activeMCK); - diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/del.sql 
b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/del.sql deleted file mode 100644 index 11cdad81b3ff..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/del.sql +++ /dev/null @@ -1,22 +0,0 @@ -DROP INDEX mck_id_index; -DROP INDEX mck_default_index; -DROP INDEX smck_id_index; -DROP INDEX smck_config_hash_index; -DROP INDEX link_mck_index; -DROP INDEX link_smck_index; -ALTER TABLE mck_to_smck_link DROP CONSTRAINT link_smck_fk; -ALTER TABLE mck_to_smck_link DROP CONSTRAINT link_smck_nn; -ALTER TABLE mck_to_smck_link DROP CONSTRAINT link_mck_fk; -ALTER TABLE mck_to_smck_link DROP CONSTRAINT link_mck_nn; -ALTER TABLE mck_to_smck_link DROP CONSTRAINT link_u; -ALTER TABLE smck_table DROP CONSTRAINT smck_config_hash_nn; -ALTER TABLE smck_table DROP CONSTRAINT smck_config_nn; -ALTER TABLE smck_table DROP CONSTRAINT smck_slice_version_nn; -ALTER TABLE smck_table DROP CONSTRAINT smck_slice_version_u; -ALTER TABLE smck_table DROP CONSTRAINT smck_slice_type_nn; -ALTER TABLE smck_table DROP CONSTRAINT smck_pk; -ALTER TABLE mck_table DROP CONSTRAINT mck_default_nn; -ALTER TABLE mck_table DROP CONSTRAINT mck_pk; -DROP TABLE mck_to_smck_link; -DROP TABLE smck_table; -DROP TABLE mck_table; diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BJET_Cosmics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BJET_Cosmics_dictionary.py deleted file mode 100644 index cf75de96de56..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BJET_Cosmics_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='BJET' -RunType='Cosmics' -Recostep='ESD' -MonitoringTriggers= [ " ",] -Config=[] -Config+= [("HLTBjetMon", {"Taggers" : ["IP1D", "IP2D", "IP3D"],})] -PkgName = "TrigBjetMonitoring.TrigBjetMonitoringConfig" -CreatorName = "TrigBjetMonitoringConfig" -Script = "print 'Test script'" diff --git 
a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BJET_Physics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BJET_Physics_dictionary.py deleted file mode 100644 index ceb50ceedb9a..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BJET_Physics_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='BJET' -RunType='Physics' -Recostep='ESD' -MonitoringTriggers= [ " ",] -Config=[] -Config+= [("HLTBjetMon", {"Taggers" : ["IP1D", "IP2D", "IP3D"],})] -PkgName = "TrigBjetMonitoring.TrigBjetMonitoringConfig" -CreatorName = "TrigBjetMonitoringConfig" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BJET_Standby_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BJET_Standby_dictionary.py deleted file mode 100644 index 72a36078ec96..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BJET_Standby_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='BJET' -RunType='Standby' -Recostep='ESD' -MonitoringTriggers= [ " ",] -Config=[] -Config+= [("HLTBjetMon", {"Taggers" : ["IP1D", "IP2D", "IP3D"],})] -PkgName = "TrigBjetMonitoring.TrigBjetMonitoringConfig" -CreatorName = "TrigBjetMonitoringConfig" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BPHYS_Cosmics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BPHYS_Cosmics_dictionary.py deleted file mode 100644 index 50a0863ce6dc..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BPHYS_Cosmics_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='BPHYS' -RunType='Cosmics' -Recostep='ESD' 
-MonitoringTriggers= [ " ",] -Config=[] -Config+= [("HLTBphysMon", {})] -PkgName = "TrigBphysMonitoring.TrigBphysMonitoringConfig" -CreatorName = "TrigBphysMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BPHYS_Physics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BPHYS_Physics_dictionary.py deleted file mode 100644 index 24f1861e45e2..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BPHYS_Physics_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='BPHYS' -RunType='Physics' -Recostep='ESD' -MonitoringTriggers= [ " ",] -Config=[] -Config+= [("HLTBphysMon", {})] -PkgName = "TrigBphysMonitoring.TrigBphysMonitoringConfig" -CreatorName = "TrigBphysMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BPHYS_Standby_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BPHYS_Standby_dictionary.py deleted file mode 100644 index 2ea4bcdb98ad..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/BPHYS_Standby_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='BPHYS' -RunType='Standby' -Recostep='ESD' -MonitoringTriggers= [ " ",] -Config=[] -Config+= [("HLTBphysMon", {})] -PkgName = "TrigBphysMonitoring.TrigBphysMonitoringConfig" -CreatorName = "TrigBphysMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALOESD_Cosmics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALOESD_Cosmics_dictionary.py deleted file mode 100644 index 37aca1207a40..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALOESD_Cosmics_dictionary.py +++ 
/dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='CALOESD' -RunType='Cosmics' -Recostep='ESD' -MonitoringTriggers= [" ",] -Config=[] -Config+= [("HLTCaloL2", {}),("HLTCaloCluster", {})] -PkgName = "TrigCaloMonitoring.TrigCaloMonitoringConfig" -CreatorName = "HLTCaloMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALOESD_Physics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALOESD_Physics_dictionary.py deleted file mode 100644 index d5a7bad39d1e..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALOESD_Physics_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='CALOESD' -RunType='Physics' -Recostep='ESD' -MonitoringTriggers= [" ",] -Config=[] -Config+= [("HLTCaloL2", {"ListOfCellsKeys":['HLT_TrigT2CaloTauCells', ]}),("HLTCaloCluster", {})] -PkgName = "TrigCaloMonitoring.TrigCaloMonitoringConfig" -CreatorName = "HLTCaloMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALOESD_Standby_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALOESD_Standby_dictionary.py deleted file mode 100644 index 55b99f979d53..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALOESD_Standby_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='CALOESD' -RunType='Standby' -Recostep='ESD' -MonitoringTriggers= [" ",] -Config=[] -Config+= [("HLTCaloL2", {}),("HLTCaloCluster", {})] -PkgName = "TrigCaloMonitoring.TrigCaloMonitoringConfig" -CreatorName = "HLTCaloMonitoringTool" -Script = "print 'Test script'" diff --git 
a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALORAW_Cosmics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALORAW_Cosmics_dictionary.py deleted file mode 100644 index 122df884df40..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALORAW_Cosmics_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='CALORAW' -RunType='Cosmics' -Recostep='RAW' -MonitoringTriggers= [" ",] -Config=[] -Config+= [("HLTCalo", {}), ("HLTCaloFEB", {"sigma" : 3,})] -PkgName = "TrigCaloMonitoring.TrigCaloMonitoringConfig" -CreatorName = "HLTCaloMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALORAW_Physics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALORAW_Physics_dictionary.py deleted file mode 100644 index 77ae9f7a7244..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALORAW_Physics_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='CALORAW' -RunType='Physics' -Recostep='RAW' -MonitoringTriggers= [" ",] -Config=[] -Config+= [("HLTCalo", {}), ("HLTCaloFEB", {"sigma" : 4,})] -PkgName = "TrigCaloMonitoring.TrigCaloMonitoringConfig" -CreatorName = "HLTCaloMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALORAW_Standby_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALORAW_Standby_dictionary.py deleted file mode 100644 index 86df37b3c939..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/CALORAW_Standby_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='CALORAW' -RunType='Standby' -Recostep='RAW' 
-MonitoringTriggers= [" ",] -Config=[] -Config+= [("HLTCalo", {}), ("HLTCaloFEB", {"sigma" : 3,})] -PkgName = "TrigCaloMonitoring.TrigCaloMonitoringConfig" -CreatorName = "HLTCaloMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/EGAMMA_Cosmics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/EGAMMA_Cosmics_dictionary.py deleted file mode 100644 index ab9d57c78256..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/EGAMMA_Cosmics_dictionary.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='EGAMMA' -RunType='Cosmics' -Recostep='ESD' -MonitoringTriggers= [] -Config=[] -#Config+=[("HLTEgammaMon", {"L2Et" : 3200.0,})] -Config+=[("HLTEgammaNavMon", {"L1EmClus" : 3200.0,})] -#Config+=[("HLTEgammaNavSigTEMon", {"L1EmIsol" : 96000.0,})] -PkgName = "TrigEgammaMonitoring.TrigEgammaMonitoringConfig" -CreatorName = "HLTEgammaMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/EGAMMA_Physics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/EGAMMA_Physics_dictionary.py deleted file mode 100644 index 68854b854153..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/EGAMMA_Physics_dictionary.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='EGAMMA' -RunType='Physics' -Recostep='ESD' -MonitoringTriggers= [" ",] -Config=[] -#Config+=[("HLTEgammaMon", {"L2Et" : 3500.0,})] -Config+=[("HLTEgammaNavMon", {"L1EmClus" : 3500.0,})] -#Config+=[("HLTEgammaNavSigTEMon", {"L1EmIsol" : 98000.0,})] -PkgName = "TrigEgammaMonitoring.TrigEgammaMonitoringConfig" -CreatorName = "HLTEgammaMonitoringTool" -Script = "print 'Test script'" diff --git 
a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/EGAMMA_Standby_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/EGAMMA_Standby_dictionary.py deleted file mode 100644 index 176d218df45d..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/EGAMMA_Standby_dictionary.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='EGAMMA' -RunType='Standby' -Recostep='ESD' -MonitoringTriggers= [] -Config=[] -#Config+=[("HLTEgammaMon", {"L2Et" : 3200.0,})] -Config+=[("HLTEgammaNavMon", {"L1EmClus" : 3200.0,})] -#Config+=[("HLTEgammaNavSigTEMon", {"L1EmIsol" : 96000.0,})] -PkgName = "TrigEgammaMonitoring.TrigEgammaMonitoringConfig" -CreatorName = "HLTEgammaMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDJPSI_Cosmics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDJPSI_Cosmics_dictionary.py deleted file mode 100644 index 287f8eeb49a0..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDJPSI_Cosmics_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='IDJPSI' -RunType='Cosmics' -Recostep='ESD' -MonitoringTriggers= [ " ",] -Config=[] -Config+= [("TrigJpsiMon", { }),] -PkgName = "TrigIDJpsiMonitoring.TrigIDJpsiMonitoringConfig" -CreatorName = "TrigIDJpsiMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDJPSI_Physics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDJPSI_Physics_dictionary.py deleted file mode 100644 index c782239f4578..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDJPSI_Physics_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of 
the ATLAS collaboration - -Slice='IDJPSI' -RunType='Physics' -Recostep='ESD' -MonitoringTriggers= [ " ",] -Config=[] -Config+= [("TrigJpsiMon", { }),] -PkgName = "TrigIDJpsiMonitoring.TrigIDJpsiMonitoringConfig" -CreatorName = "TrigIDJpsiMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDJPSI_Standby_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDJPSI_Standby_dictionary.py deleted file mode 100644 index 79db4762dca5..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDJPSI_Standby_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='IDJPSI' -RunType='Standby' -Recostep='ESD' -MonitoringTriggers= [ " ",] -Config=[] -Config+= [("TrigJpsiMon", { }),] -PkgName = "TrigIDJpsiMonitoring.TrigIDJpsiMonitoringConfig" -CreatorName = "TrigIDJpsiMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Cosmics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Cosmics_dictionary.py deleted file mode 100644 index 1ff4fdb6ccc2..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Cosmics_dictionary.py +++ /dev/null @@ -1,12 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='IDTRK' -RunType='Cosmics' -Recostep='ESD' -MonitoringTriggers= [ " ",] -Config=[] -Config+= [("MyAtlasExtrapolator", {}), -] -PkgName = "TrigIDtrkMonitoring.TrigIDtrkMonitoringConfig" -CreatorName = "TrigIDtrkMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Physics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Physics_dictionary.py deleted file mode 100644 index cf2683999840..000000000000 --- 
a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Physics_dictionary.py +++ /dev/null @@ -1,12 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='IDTRK' -RunType='Physics' -Recostep='ESD' -MonitoringTriggers= [ "BENS_MADE-UP_TRIGGER_NAME",] -Config=[] -Config+= [("MyAtlasExtrapolator", {}), -] -PkgName = "TrigIDtrkMonitoring.TrigIDtrkMonitoringConfig" -CreatorName = "TrigIDtrkMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Standby_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Standby_dictionary.py deleted file mode 100644 index 94d14e1849d9..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Standby_dictionary.py +++ /dev/null @@ -1,12 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='IDTRK' -RunType='Standby' -Recostep='ESD' -MonitoringTriggers= [ " ",] -Config=[] -Config+= [("MyAtlasExtrapolator", {}), -] -PkgName = "TrigIDtrkMonitoring.TrigIDtrkMonitoringConfig" -CreatorName = "TrigIDtrkMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/JET_Cosmics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/JET_Cosmics_dictionary.py deleted file mode 100644 index b2671e3dad91..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/JET_Cosmics_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='JET' -RunType='Cosmics' -Recostep='ESD' -MonitoringTriggers= [] -Config=[] -Config+=[("HLTJetMon",{"EMFractionCut" : 0.7,} )] -PkgName = "TrigJetMonitoring.TrigJetMonitoringConfig" -CreatorName = "TrigJetMonitoringTool" -Script = "print 'Test script'" diff --git 
a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/JET_Physics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/JET_Physics_dictionary.py deleted file mode 100644 index 37f5c6f35070..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/JET_Physics_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='JET' -RunType='Physics' -Recostep='ESD' -MonitoringTriggers= [" ",] -Config=[] -Config+=[("HLTJetMon",{"EMFractionCut" : 0.8,} )] -PkgName = "TrigJetMonitoring.TrigJetMonitoringConfig" -CreatorName = "TrigJetMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/JET_Standby_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/JET_Standby_dictionary.py deleted file mode 100644 index cdf02548d4de..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/JET_Standby_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='JET' -RunType='Standby' -Recostep='ESD' -MonitoringTriggers= [] -Config=[] -Config+=[("HLTJetMon",{"EMFractionCut" : 0.7,} )] -PkgName = "TrigJetMonitoring.TrigJetMonitoringConfig" -CreatorName = "TrigJetMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Cosmics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Cosmics_dictionary.py deleted file mode 100644 index 42bb105ac97b..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Cosmics_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='MET' -RunType='Cosmics' -Recostep='ESD' -MonitoringTriggers= [" ",] -Config=[] -Config+= [("HLTMETMon", {}),] 
-PkgName = "TrigMETMonitoring.TrigMETMonitoringConfig" -CreatorName = "HLTMETMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Physics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Physics_dictionary.py deleted file mode 100644 index fdc080df2dc9..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Physics_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='MET' -RunType='Physics' -Recostep='ESD' -MonitoringTriggers= [ " ",] -Config=[] -Config+= [("HLTMETMon", {}),] -PkgName = "TrigMETMonitoring.TrigMETMonitoringConfig" -CreatorName = "HLTMETMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Standby_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Standby_dictionary.py deleted file mode 100644 index 3c1ffd117b4f..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Standby_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='MET' -RunType='Standby' -Recostep='ESD' -MonitoringTriggers= [ " ",] -Config=[] -Config+= [("HLTMETMon", {}),] -PkgName = "TrigMETMonitoring.TrigMETMonitoringConfig" -CreatorName = "HLTMETMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MINBIAS_Cosmics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MINBIAS_Cosmics_dictionary.py deleted file mode 100644 index 7cdcc0356ee9..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MINBIAS_Cosmics_dictionary.py +++ /dev/null @@ -1,12 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - 
-Slice='MINBIAS' -RunType='Cosmics' -Recostep='ESD' -MonitoringTriggers= [ " ",] -Config=[] -#Config+=[("MbtsMinBiasMon", {}), ("BcmMinBiasMon", {}), ("HLTMinBiasMon", {}), ("LucidMinBiasMon",{}), ("ZdcMinBiasMon",{})] -Config+=[("HLTMinBiasMon", {})] -PkgName = "TrigMinBiasMonitoring.TrigMinBiasMonitoringConfig" -CreatorName = "TrigMinBiasMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MINBIAS_Physics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MINBIAS_Physics_dictionary.py deleted file mode 100644 index 2745b5b229c4..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MINBIAS_Physics_dictionary.py +++ /dev/null @@ -1,12 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='MINBIAS' -RunType='Physics' -Recostep='ESD' -MonitoringTriggers= [ " ",] -Config=[] -#Config+=[("MbtsMinBiasMon", {}), ("BcmMinBiasMon", {}), ("HLTMinBiasMon", {}), ("LucidMinBiasMon",{}), ("ZdcMinBiasMon",{})] -Config+=[("HLTMinBiasMon", {})] -PkgName = "TrigMinBiasMonitoring.TrigMinBiasMonitoringConfig" -CreatorName = "TrigMinBiasMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MINBIAS_Standby_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MINBIAS_Standby_dictionary.py deleted file mode 100644 index 134cf585d19c..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MINBIAS_Standby_dictionary.py +++ /dev/null @@ -1,12 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='MINBIAS' -RunType='Standby' -Recostep='ESD' -MonitoringTriggers= [ " ",] -Config=[] -#Config+=[("MbtsMinBiasMon", {}), ("BcmMinBiasMon", {}), ("HLTMinBiasMon", {}), ("LucidMinBiasMon",{}), ("ZdcMinBiasMon",{})] -Config+=[("HLTMinBiasMon", {})] -PkgName = 
"TrigMinBiasMonitoring.TrigMinBiasMonitoringConfig" -CreatorName = "TrigMinBiasMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MUON_Cosmics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MUON_Cosmics_dictionary.py deleted file mode 100644 index 6a4098129e29..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MUON_Cosmics_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='MUON' -RunType='Cosmics' -Recostep='ESD' -MonitoringTriggers = [ " ",] -Config = [] -Config += [("HLTMuonMon", {"ZTPPtCone20RelCut" : 0.15,}),] -PkgName = "TrigMuonMonitoring.TrigMuonMonitoringConfig" -CreatorName = "TrigMuonMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MUON_Physics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MUON_Physics_dictionary.py deleted file mode 100644 index c612427892ea..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MUON_Physics_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='MUON' -RunType='Physics' -Recostep='ESD' -MonitoringTriggers = [ " ",] -Config = [] -Config += [("HLTMuonMon", {"ZTPPtCone20RelCut" : 0.15,}),] -PkgName = "TrigMuonMonitoring.TrigMuonMonitoringConfig" -CreatorName = "TrigMuonMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MUON_Standby_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MUON_Standby_dictionary.py deleted file mode 100644 index e1416af5805b..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MUON_Standby_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 
2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='MUON' -RunType='Standby' -Recostep='ESD' -MonitoringTriggers = [ " ",] -Config = [] -Config += [("HLTMuonMon", {"ZTPPtCone20RelCut" : 0.15,}),] -PkgName = "TrigMuonMonitoring.TrigMuonMonitoringConfig" -CreatorName = "TrigMuonMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/TAU_Cosmics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/TAU_Cosmics_dictionary.py deleted file mode 100644 index b3e148b346a4..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/TAU_Cosmics_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='TAU' -RunType='Cosmics' -Recostep='ESD' -MonitoringTriggers= ["TauNoCut", "EF_tau45",] -Config=[] -Config+= [("HLTTauMon", {"EffOffTauPtCut" : 20.0,})] -PkgName = "TrigTauMonitoring.TrigTauMonitoringConfig" -CreatorName = "TrigTauMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/TAU_Physics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/TAU_Physics_dictionary.py deleted file mode 100644 index a6dbba77c4fc..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/TAU_Physics_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='TAU' -RunType='Physics' -Recostep='ESD' -MonitoringTriggers= ["TauNoCut", "EF_tau45",] -Config=[] -Config+= [("HLTTauMon", {"EffOffTauPtCut" : 25.0,})] -PkgName = "TrigTauMonitoring.TrigTauMonitoringConfig" -CreatorName = "TrigTauMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/TAU_Standby_dictionary.py 
b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/TAU_Standby_dictionary.py deleted file mode 100644 index 0df24f7d7ff0..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/TAU_Standby_dictionary.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -Slice='TAU' -RunType='Standby' -Recostep='ESD' -MonitoringTriggers= ["TauNoCut", "EF_tau45",] -Config=[] -Config+= [("HLTTauMon", {"EffOffTauPtCut" : 15.0,})] -PkgName = "TrigTauMonitoring.TrigTauMonitoringConfig" -CreatorName = "TrigTauMonitoringTool" -Script = "print 'Test script'" diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/env.sh b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/env.sh deleted file mode 100644 index 34dfe70a547a..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/env.sh +++ /dev/null @@ -1,3 +0,0 @@ -export AtlasSetup=/afs/cern.ch/atlas/software/dist/AtlasSetup -alias asetup='source $AtlasSetup/scripts/asetup.sh' -asetup 19.1.3.1,here diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/fill_activeMCK_table.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/fill_activeMCK_table.py deleted file mode 100644 index 3936fa7583f4..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/fill_activeMCK_table.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -#import sqlite3 -import hashlib -import cx_Oracle -conn = cx_Oracle.connect('tklimk/IiUTYhtOYL956!@devdb11') -print conn.version -#conn.close() - - - -#tkconn = sqlite3.connect('test.db') #test.db is a database that gets created with ./createDB.sh - -#You can browse the databse with: -#>sqlite3 test.db -#and then inside the interactive environment: -#.tables -#(see more options with .help) - - -c = conn.cursor() - -#c.execute("SELECT * FROM sig_config") -#r = c.fetchone() -#print r - - -c.execute("INSERT INTO activeMCK_table 
VALUES (1, 'activeMCK', 4)") - -# Commit to the database the new entries and close the connection -conn.commit() -conn.close() diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/fill_cool_madqm.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/fill_cool_madqm.py deleted file mode 100644 index 29a03d099add..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/fill_cool_madqm.py +++ /dev/null @@ -1,142 +0,0 @@ -#!/bin/env python - -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration -from PyCool import cool,coral -import cx_Oracle,sys -from collections import OrderedDict - -dbfile="madqm.sqlite" -#dbname="COMP200" -dbname="CONDBR2" -#Tag = "MDT-GEO-ILINES-01" -#fdname='/MUONALIGN/ILINES' -fdname='/TRIGGER/HLT/MenuAwareDQMonitoring5' - -# main script -# -# WARNING!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -# OBSOLETE: Please use ilines2cool instead ! -# -# remove the old db file so that we can write the new one -try: - import os - os.remove(dbfile) -except: - pass -# ----------------------------------------------------------------------- # -# get database service and open database -# ----------------------------------------------------------------------- # -dbSvc=cool.DatabaseSvcFactory.databaseService() -# database accessed via physical name -dbstring="sqlite://;schema=%s;dbname=%s" % (dbfile,dbname) -try: - db=dbSvc.createDatabase(dbstring) -except Exception,e: - print 'Problem creating database',e - sys.exit(-1) - -print "Created database",dbstring - -# ----------------------------------------------------------------------- # -# setup a folder -# ----------------------------------------------------------------------- # -#spec=cool.RecordSpecification() -payloadSpec = cool.RecordSpecification() -payloadSpec.extend("MCKname",cool.StorageType.String255) -payloadSpec.extend("MCKdata",cool.StorageType.UInt32) -#recspec.extend("geotag",cool.StorageType.String4k) -#recspec.extend("geoversion",cool.StorageType.String4k) 
-#spec=cool.FolderSpecification(cool.FolderVersioning.MULTI_VERSION,recspec,True) - -folderMode=cool.FolderVersioning.SINGLE_VERSION -folderSpec = cool.FolderSpecification(folderMode, payloadSpec) - -#spec=cool.FolderSpecification(cool.FolderVersioning.SINGLE_VERSION,recspec,True) - -#spec.extend(fdspec,cool.StorageType.String16M) -# folder meta-data - note for Athena this has a special meaning - -#desc='<timeStamp>time</timeStamp><addrHeader><address_header service_type="71" clid="1238547719" /></addrHeader><typeName>CondAttrListCollection</typeName>' - -####<timeStamp>time</timeStamp><addrHeader><address_header service_type="71" clid="40774348" /></addrHeader><typeName>AthenaAttributeList</typeName> - -# create the folder - single version -# last argument is createParents - if true, automatically creates parent folders if needed -# note this will not work if the database already exists - delete mycool.db first -#folder=db.createFolder(fdname,spec,desc,cool.FolderVersioning.MULTI_VERSION,True) - -#folderDescr='<timeStamp>time</timeStamp><addrHeader><address_header service_type="71" clid="1238547719" /></addrHeader><typeName>CondAttrListCollection</typeName>' - -folderDescr='<timeStamp>run-lumi</timeStamp>' -folderDescr+='<addrHeader><address_header service_type="71" clid="1238547719" /></addrHeader>' -# Use CondAttrListCollection if your filder will have several channels and AthenaAttributeList if your folder has only one channel -folderDescr+='<typeName>CondAttrListCollection</typeName>' -#folderDescr+='<typeName>AthenaAttributeList</typeName>' -#folderDescr=getAthenaFolderDescr() -folder=db.createFolder(fdname,folderSpec,folderDescr,True) - -# ----------------------------------------------------------------------- # -# now fill in data - create a record and fill it -# ----------------------------------------------------------------------- # - -data=cool.Record(payloadSpec) -name="MCK" -myMCK = 2 # MCK=2 -# get a reference to the blob -data['MCKname']=name 
-data['MCKdata']=myMCK -#data['geotag']='none' -#data['geoversion']='Version R1' -#print "clob size ",len(data['data']) -print "data for ", data['MCKname'] -print "Will store this object",data['MCKdata'] - -chann=1 -#Tag="" -# store object with IOV valid from 0-max, channel 0 -# folder.storeObject(0,cool.ValidityKeyMax,data,chann,Tag) -folder.storeObject(0,cool.ValidityKeyMax,data,chann) -#------------------- -data=cool.Record(payloadSpec) - -#tk geotag='ISZT-R06.2011' -#geotag='some geotag' - -myMCK = 3 # MCK=3 -# get a reference to the blob -data['MCKname']=name -data['MCKdata']=myMCK -#print "clob size ",len(data['MCKdata']) -print "data for ", data['MCKname'] -print "Will store this object",data['MCKdata'] - -# now we want to store some MCK for Run: 212967 and LB: 291 - -runNum=212967 -lbkNum=291 - -# iov From Run Lumi(runNum, lbkNum): -# Returns COOL timeStamp built from run and lumi block numbers -my_iov = (int(runNum)<<32) + int(lbkNum) -my_cool_iov = cool.ValidityKey(my_iov) - -#chann=1 -# store object with IOV valid from 0-max, channel 0 -# folder.storeObject(0,cool.ValidityKeyMax,data,chann,Tag) -#folder.storeObject(1293836400000000000,cool.ValidityKeyMax,data,chann,Tag) - -print "min iov = ", my_cool_iov -print "max iov = ", cool.ValidityKeyMax - -#folder.storeObject(1293836400000000000,cool.ValidityKeyMax,data,chann) - -print "Storing object",data['MCKdata'] -folder.storeObject(my_cool_iov,cool.ValidityKeyMax,data,chann) -print "Object stored" - -# store IOV for Jiri's sample having MCK=4 (as example) -# looks like it has the same run number as before - - -db.closeDatabase() -sys.exit() diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/fill_smk2mck_table.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/fill_smk2mck_table.py deleted file mode 100644 index 57b7ded36cee..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/fill_smk2mck_table.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit 
of the ATLAS collaboration - -#import sqlite3 -import hashlib -import cx_Oracle -conn = cx_Oracle.connect('tklimk/IiUTYhtOYL956!@devdb11') -print conn.version -#conn.close() - - - -#tkconn = sqlite3.connect('test.db') #test.db is a database that gets created with ./createDB.sh - -#You can browse the databse with: -#>sqlite3 test.db -#and then inside the interactive environment: -#.tables -#(see more options with .help) - - -c = conn.cursor() - -#c.execute("SELECT * FROM sig_config") -#r = c.fetchone() -#print r - -#This DB has five tables: master_table, mt2type, type, type2sig, sig_config - -#c.execute("INSERT INTO cool_table VALUES (1, 'SMK', 'Super Master Key')") - -# here we put initial values to smk2mck_table -c.execute("INSERT INTO smk2mck_table VALUES (1, 'SMK2MCK', 1, 1, 'SMK to MCK correspondence')") -c.execute("INSERT INTO smk2mck_table VALUES (2, 'SMK2MCK', 2, 4, 'SMK to MCK correspondence')") -c.execute("INSERT INTO smk2mck_table VALUES (3, 'SMK2MCK', 3, 5, 'SMK to MCK correspondence')") -c.execute("INSERT INTO smk2mck_table VALUES (4, 'SMK2MCK', 4, 6, 'SMK to MCK correspondence')") - -# here!!! 
- -# get here active MCK ID -#include("tools.py") -# activeMCK=getActiveKeyMCK('oracle') -#c.execute("INSERT INTO smk2mck_table VALUES (3, 'SMK2MCK', 3, "+str(activeMCK)+", 'SMK to MCK correspondence')") - -# Commit to the database the new entries and close the connection -conn.commit() -conn.close() diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/insertInfoToDB.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/insertInfoToDB.py deleted file mode 100644 index 3b7b3ebd999c..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/insertInfoToDB.py +++ /dev/null @@ -1,351 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -#import sqlite3 -import hashlib -import cx_Oracle -conn = cx_Oracle.connect('tklimk/IiUTYhtOYL956!@devdb11') -print conn.version -#conn.close() - - - -#tkconn = sqlite3.connect('test.db') #test.db is a database that gets created with ./createDB.sh - -#You can browse the databse with: -#>sqlite3 test.db -#and then inside the interactive environment: -#.tables -#(see more options with .help) - - -c = conn.cursor() - -#c.execute("SELECT * FROM sig_config") -#r = c.fetchone() -#print r - -#This DB has five tables: master_table, mt2type, type, type2sig, sig_config - -#c.execute("INSERT INTO cool_table VALUES (1, 'SMK', 'Super Master Key')") - -c.execute("INSERT INTO master_table VALUES (1, 'MCK', 'Monitoring Configuration Key')") -#c.execute("SELECT * FROM master_table") -#r = c.fetchone() -#print r - -#c.execute("INSERT INTO smk2mck VALUES (1,1)") - -# Fill up values for different signature groups -- these go in the sig_config table - -mon_tau_config = {} -mon_tau_config["MonitoringTriggers"] = [ - "TauNoCut", "EF_tau45", -] -mon_tau_config["Config"] = [] -mon_tau_config["Config"] += [("HLTTauMon", {"EffOffTauPtCut" : 15.0,})] -mon_tau_config["PkgName"] = "TrigTauMonitoring.TrigTauMonitoringConfig" -mon_tau_config["CreatorName"] = "TrigTauMonitoringTool" -str_mon_tau_config_ = 
str(mon_tau_config) -#print "hash is ", hashlib.md5(str_mon_tau_config_).hexdigest() -hash_mon_tau_config=hashlib.md5(str_mon_tau_config_).hexdigest() -#hash_mon_tau_config=hash(str_mon_tau_config_) -str_mon_tau_config=str_mon_tau_config_.replace("'",'"') -#print "here", str_mon_tau_config - -mon_tau_config2 = {} -mon_tau_config2["MonitoringTriggers"] = [ - "TauNoCut", "EF_tau45", -] -mon_tau_config2["Config"] = [] -mon_tau_config2["Config"] += [("HLTTauMon", {"EffOffTauPtCut" : 17.0,})] -mon_tau_config2["PkgName"] = "TrigTauMonitoring.TrigTauMonitoringConfig" -mon_tau_config2["CreatorName"] = "TrigTauMonitoringTool" -str_mon_tau_config2_ = str(mon_tau_config2) -#hash_mon_tau_config2=hash(str_mon_tau_config2_) -hash_mon_tau_config2=hashlib.md5(str_mon_tau_config2_).hexdigest() -str_mon_tau_config2=str_mon_tau_config2_.replace("'",'"') - - -mon_tau_config3 = {} -mon_tau_config3["MonitoringTriggers"] = [ - "TauNoCut", "EF_tau45", -] -mon_tau_config3["Config"] = [] -mon_tau_config3["Config"] += [("HLTTauMon", {"EffOffTauPtCut" : 20.0,})] -mon_tau_config3["PkgName"] = "TrigTauMonitoring.TrigTauMonitoringConfig" -mon_tau_config3["CreatorName"] = "TrigTauMonitoringTool" -str_mon_tau_config3_ = str(mon_tau_config3) -#hash_mon_tau_config3=hash(str_mon_tau_config3_) -hash_mon_tau_config3=hashlib.md5(str_mon_tau_config3_).hexdigest() -str_mon_tau_config3=str_mon_tau_config3_.replace("'",'"') - - -mon_egamma_config = {} -mon_egamma_config["MonitoringTriggers"] = [ " ",] -mon_egamma_config["Config"] = [] -mon_egamma_config["Config"] += [("HLTEgammaMon", {"L2Et" : 3500.0,}), -("HLTEgammaNavMon", {"L1EmClus" : 3500.0,}), -("HLTEgammaNavSigTEMon", {"L1EmIsol" : 98000.0,}), -] -mon_egamma_config["PkgName"] = "TrigEgammaMonitoring.TrigEgammaMonitoringConfig" -mon_egamma_config["CreatorName"] = "HLTEgammaMonitoringTool" -str_mon_egamma_config_ = str(mon_egamma_config) -#hash_mon_egamma_config=hash(str_mon_egamma_config_) 
-hash_mon_egamma_config=hashlib.md5(str_mon_egamma_config_).hexdigest() -str_mon_egamma_config=str_mon_egamma_config_.replace("'",'"') - - -mon_bjet_config = {} -mon_bjet_config["MonitoringTriggers"] = [ " ",] -mon_bjet_config["Config"] = [] -mon_bjet_config["Config"] += [("HLTBjetMon", {"Taggers" : ["IP1D", "IP2D", "IP3D"],})] -mon_bjet_config["PkgName"] = "TrigBjetMonitoring.TrigBjetMonitoringConfig" -mon_bjet_config["CreatorName"] = "TrigBjetMonitoringConfig" -str_mon_bjet_config_ = str(mon_bjet_config) -#hash_mon_bjet_config=hash(str_mon_bjet_config_) -hash_mon_bjet_config=hashlib.md5(str_mon_bjet_config_).hexdigest() -str_mon_bjet_config=str_mon_bjet_config_.replace("'",'"') - -mon_bphys_config = {} -mon_bphys_config["MonitoringTriggers"] = [ " ",] -mon_bphys_config["Config"] = [] -mon_bphys_config["Config"] += [("HLTBphysMon", {})] -mon_bphys_config["PkgName"] = "TrigBphysMonitoring.TrigBphysMonitoringConfig" -mon_bphys_config["CreatorName"] = "TrigBphysMonitoringTool" -str_mon_bphys_config_ = str(mon_bphys_config) -#hash_mon_bphys_config=hash(str_mon_bphys_config_) -hash_mon_bphys_config=hashlib.md5(str_mon_bphys_config_).hexdigest() -str_mon_bphys_config=str_mon_bphys_config_.replace("'",'"') - - -mon_caloraw_config = {} -mon_caloraw_config["MonitoringTriggers"] = [ " ",] -mon_caloraw_config["Config"] = [] -mon_caloraw_config["Config"] += [("HLTCalo", {}), ("HLTCaloFEB", {"sigma" : 2,})] -mon_caloraw_config["PkgName"] = "TrigCaloMonitoring.TrigCaloMonitoringConfig" -mon_caloraw_config["CreatorName"] = "HLTCaloMonitoringTool" -str_mon_caloraw_config_ = str(mon_caloraw_config) -#hash_mon_caloraw_config=hash(str_mon_caloraw_config_) -hash_mon_caloraw_config=hashlib.md5(str_mon_caloraw_config_).hexdigest() -str_mon_caloraw_config=str_mon_caloraw_config_.replace("'",'"') - -mon_caloesd_config = {} -mon_caloesd_config["MonitoringTriggers"] = [ " ",] -mon_caloesd_config["Config"] = [] -mon_caloesd_config["Config"] += [("HLTCaloL2", {}),("HLTCaloCluster", {})] 
-mon_caloesd_config["PkgName"] = "TrigCaloMonitoring.TrigCaloMonitoringConfig" -mon_caloesd_config["CreatorName"] = "HLTCaloMonitoringTool" -str_mon_caloesd_config_ = str(mon_caloesd_config) -#hash_mon_caloesd_config=hash(str_mon_caloesd_config_) -hash_mon_caloesd_config=hashlib.md5(str_mon_caloesd_config_).hexdigest() -str_mon_caloesd_config=str_mon_caloesd_config_.replace("'",'"') - -mon_jet_config = {} -mon_jet_config["MonitoringTriggers"] = [ " ",] -mon_jet_config["Config"] = [] -mon_jet_config["Config"] += [("HLTJetMon",{"EMFractionCut" : 0.8,} )] -mon_jet_config["PkgName"] = "TrigJetMonitoring.TrigJetMonitoringConfig" -mon_jet_config["CreatorName"] = "TrigJetMonitoringTool" -str_mon_jet_config_ = str(mon_jet_config) -#hash_mon_jet_config=hash(str_mon_jet_config_) -hash_mon_jet_config=hashlib.md5(str_mon_jet_config_).hexdigest() -str_mon_jet_config=str_mon_jet_config_.replace("'",'"') - -mon_met_config = {} -mon_met_config["MonitoringTriggers"] = [ " ",] -mon_met_config["Config"] = [] -mon_met_config["Config"] += [("HLTMETMon", {}), ("HLTMETMon_FEB", {}), ("HLTMETMon_topocl", {}),] -mon_met_config["PkgName"] = "TrigMETMonitoring.TrigMETMonitoringConfig" -mon_met_config["CreatorName"] = "HLTMETMonitoringTool" -str_mon_met_config_ = str(mon_met_config) -#hash_mon_met_config=hash(str_mon_met_config_) -hash_mon_met_config=hashlib.md5(str_mon_met_config_).hexdigest() -str_mon_met_config=str_mon_met_config_.replace("'",'"') - - -mon_minbias_config = {} -mon_minbias_config["MonitoringTriggers"] = [ " ",] -mon_minbias_config["Config"] = [] -#mon_minbias_config["Config"] += [("MbtsMinBiasMon", {}), ("BcmMinBiasMon", {}), ("HLTMinBiasMon", {}), -#("LucidMinBiasMon",{}), ("ZdcMinBiasMon",{}) -#] -mon_minbias_config["Config"] += [("HLTMinBiasMon", {})] -mon_minbias_config["PkgName"] = "TrigMinBiasMonitoring.TrigMinBiasMonitoringConfig" -mon_minbias_config["CreatorName"] = "TrigMinBiasMonitoringTool" -str_mon_minbias_config_ = str(mon_minbias_config) 
-#hash_mon_minbias_config=hash(str_mon_minbias_config_) -hash_mon_minbias_config=hashlib.md5(str_mon_minbias_config_).hexdigest() -str_mon_minbias_config=str_mon_minbias_config_.replace("'",'"') - -mon_muon_config = {} -mon_muon_config["MonitoringTriggers"] = [ " ",] -mon_muon_config["Config"] = [] -mon_muon_config["Config"] += [("HLTMuonMon", {"ZTPPtCone20RelCut" : 0.15,}),] -mon_muon_config["PkgName"] = "TrigMuonMonitoring.TrigMuonMonitoringConfig" -mon_muon_config["CreatorName"] = "TrigMuonMonitoringTool" -str_mon_muon_config_ = str(mon_muon_config) -#hash_mon_muon_config=hash(str_mon_muon_config_) -hash_mon_muon_config=hashlib.md5(str_mon_muon_config_).hexdigest() -#print hash_mon_muon_config -#print mon_muon_config -#print str_mon_muon_config_ -str_mon_muon_config=str_mon_muon_config_.replace("'",'"') - -mon_idtrk_config = {} -mon_idtrk_config["MonitoringTriggers"] = [ " ",] -mon_idtrk_config["Config"] = [] -mon_idtrk_config["Config"] += [("HLTIDpvtx", {"OnlineEfficiancyRangeCutY" : 0.02,}), ("HLTIDtrkDump", {"MinSiHits" : 7,}), -("tidatool", {}), ("MyAtlasExtrapolator", {}), ("MyCaloExtrapolatorTool", {}), ("HLTIDZeeTag", {}) -] -mon_idtrk_config["PkgName"] = "TrigIDtrkMonitoring.TrigIDtrkMonitoringConfig" -mon_idtrk_config["CreatorName"] = "TrigIDtrkMonitoringTool" -str_mon_idtrk_config_ = str(mon_idtrk_config) -#hash_mon_idtrk_config=hash(str_mon_idtrk_config_) -hash_mon_idtrk_config=hashlib.md5(str_mon_idtrk_config_).hexdigest() -str_mon_idtrk_config=str_mon_idtrk_config_.replace("'",'"') - -mon_idjpsi_config = {} -mon_idjpsi_config["MonitoringTriggers"] = [ " ",] -mon_idjpsi_config["Config"] = [] -mon_idjpsi_config["Config"] += [("TrigJpsiMon", { }),] -mon_idjpsi_config["PkgName"] = "TrigIDJpsiMonitoring.TrigIDJpsiMonitoringConfig" -mon_idjpsi_config["CreatorName"] = "TrigIDJpsiMonitoringTool" -str_mon_idjpsi_config_ = str(mon_idjpsi_config) -#hash_mon_idjpsi_config=hash(str_mon_idjpsi_config_) 
-hash_mon_idjpsi_config=hashlib.md5(str_mon_idjpsi_config_).hexdigest() -str_mon_idjpsi_config=str_mon_idjpsi_config_.replace("'",'"') - - -# Simple script that could go in the third argument of the sig_config table - -testTauScript = 'print "Test script" ' -str_testTauScript = str(testTauScript) -hash_mon_tau_script=hashlib.md5(testTauScript).hexdigest() -#hash_mon_tau_config=hash(testTauScript) - -testTauScript2 = 'print "Test script" ' -str_testTauScript2=str(testTauScript2) -hash_mon_tau_script2=hashlib.md5(testTauScript2).hexdigest() - -testEgammaScript = 'print "Test script" ' -str_testEgammaScript =str(testEgammaScript) -hash_mon_egamma_script=hashlib.md5(testEgammaScript).hexdigest() - -testBjetScript = 'print "Test script" ' -str_testBjetScript=str(testBjetScript) -hash_mon_bjet_script=hashlib.md5(testBjetScript).hexdigest() - -testBphysScript = 'print "Test script" ' -str_testBphysScript=str(testBphysScript) -hash_mon_bphys_script=hashlib.md5(testBphysScript).hexdigest() - -testCaloRawScript = 'print "Test script" ' -str_testCaloRawScript = str(testCaloRawScript) -hash_mon_caloraw_script=hashlib.md5(testCaloRawScript).hexdigest() - -testCaloEsdScript = 'print "Test script" ' -str_testCaloEsdScript = str(testCaloEsdScript) -hash_mon_caloesd_script=hashlib.md5(testCaloEsdScript).hexdigest() - -testJetScript = 'print "Test script" ' -str_testJetScript = str(testJetScript) -hash_mon_jet_script=hashlib.md5(testJetScript).hexdigest() - -testMETScript = 'print "Test script" ' -str_testMETScript=str(testMETScript) -hash_mon_met_script=hashlib.md5(testMETScript).hexdigest() - -testMinBiasScript = 'print "Test script" ' -str_testMinBiasScript= str(testMinBiasScript) -hash_mon_minbias_script=hashlib.md5(testMinBiasScript).hexdigest() - -testMuonScript = 'print "Test script" ' -str_testMuonScript = str(testMuonScript) -hash_mon_muon_script=hashlib.md5(testMuonScript).hexdigest() - -testIDtrkScript = 'print "Test script" ' -str_testIDtrkScript= str(testIDtrkScript) 
-hash_mon_idtrk_script=hashlib.md5(testIDtrkScript).hexdigest() - -testIDJpsiScript = 'print "Test script" ' -str_testIDJpsiScript = str(testIDJpsiScript) -hash_mon_idjpsi_script=hashlib.md5(testIDJpsiScript).hexdigest() - -# Put values in the database. This will go under the sig_config table with names "TAU" and "EGAMMA", etc. -c.execute("INSERT INTO sig_config VALUES (1,'TAU','ESD','"+str_mon_tau_config+"','"+str_testTauScript+"','"+hash_mon_tau_config+"','"+hash_mon_tau_script+"')") -c.execute("INSERT INTO sig_config VALUES (2,'EGAMMA','ESD','"+str_mon_egamma_config+"','"+str_testEgammaScript+"','"+hash_mon_egamma_config+"','"+hash_mon_tau_script+"')") -c.execute("INSERT INTO sig_config VALUES (3,'BJET','ESD','"+str_mon_bjet_config+"','"+str_testBjetScript+"','"+hash_mon_bjet_config+"','"+hash_mon_bjet_script+"')") -c.execute("INSERT INTO sig_config VALUES (4,'BPHYS','ESD','"+str_mon_bphys_config+"','"+str_testBphysScript+"','"+hash_mon_bphys_config+"','"+hash_mon_bphys_script+"')") -c.execute("INSERT INTO sig_config VALUES (5,'JET','ESD','"+str_mon_jet_config+"','"+str_testJetScript+"','"+hash_mon_jet_config+"','"+hash_mon_jet_script+"')") -c.execute("INSERT INTO sig_config VALUES (6,'MET','ESD','"+str_mon_met_config+"','"+str_testMETScript+"','"+hash_mon_met_config+"','"+hash_mon_met_script+"')") -c.execute("INSERT INTO sig_config VALUES (7,'MINBIAS','ESD','"+str_mon_minbias_config+"','"+str_testMinBiasScript+"','"+hash_mon_minbias_config+"','"+hash_mon_minbias_script+"')") -c.execute("INSERT INTO sig_config VALUES (8,'MUON','ESD','"+str_mon_muon_config+"','"+str_testMuonScript+"','"+hash_mon_muon_config+"','"+hash_mon_muon_script+"')") -c.execute("INSERT INTO sig_config VALUES (9,'IDTRK','ESD','"+str_mon_idtrk_config+"','"+str_testIDtrkScript+"','"+hash_mon_idtrk_config+"','"+hash_mon_idtrk_script+"')") -c.execute("INSERT INTO sig_config VALUES 
(10,'IDJPSI','ESD','"+str_mon_idjpsi_config+"','"+str_testIDJpsiScript+"','"+hash_mon_idjpsi_config+"','"+hash_mon_idjpsi_script+"')") -c.execute("INSERT INTO sig_config VALUES (11,'CALORAW','RAW','"+str_mon_caloraw_config+"','"+str_testCaloRawScript+"','"+hash_mon_caloraw_config+"','"+hash_mon_caloraw_script+"')") -c.execute("INSERT INTO sig_config VALUES (12,'CALOESD','ESD','"+str_mon_caloesd_config+"','"+str_testCaloEsdScript+"','"+hash_mon_caloesd_config+"','"+hash_mon_caloesd_script+"')") - -#c.execute("INSERT INTO sig_config VALUES (11,'TAU','ESD','"+str_mon_tau_config2+"','"+str_testTauScript2+"','"+hash_mon_tau_config2+"','"+hash_mon_tau_script2+"')") -#c.execute("INSERT INTO sig_config VALUES (12,'TAU','ESD','"+str_mon_tau_config3+"','"+str_testTauScript2+"','"+hash_mon_tau_config3+"','"+hash_mon_tau_script2+"')") - -# fill type table -c.execute("INSERT INTO type_table VALUES (1,'Physics')") -c.execute("INSERT INTO type_table VALUES (2,'Standby')") -c.execute("INSERT INTO type_table VALUES (3,'Cosmics')") - -# Create the correspondence between the master_table and type -c.execute("INSERT INTO mt2type VALUES (1,1)") -c.execute("INSERT INTO mt2type VALUES (1,2)") -c.execute("INSERT INTO mt2type VALUES (1,3)") - -# Create the correspondence between type and sig_config -c.execute("INSERT INTO type2sig VALUES (1,1)") -c.execute("INSERT INTO type2sig VALUES (1,2)") -c.execute("INSERT INTO type2sig VALUES (1,3)") -c.execute("INSERT INTO type2sig VALUES (1,4)") -c.execute("INSERT INTO type2sig VALUES (1,5)") -c.execute("INSERT INTO type2sig VALUES (1,6)") -c.execute("INSERT INTO type2sig VALUES (1,7)") -c.execute("INSERT INTO type2sig VALUES (1,8)") -c.execute("INSERT INTO type2sig VALUES (1,9)") -c.execute("INSERT INTO type2sig VALUES (1,10)") -c.execute("INSERT INTO type2sig VALUES (1,11)") -c.execute("INSERT INTO type2sig VALUES (1,12)") -#c.execute("INSERT INTO type2sig VALUES (1,13)") - -c.execute("INSERT INTO type2sig VALUES (2,1)") -#c.execute("INSERT INTO 
type2sig VALUES (2,11)") -c.execute("INSERT INTO type2sig VALUES (2,2)") -c.execute("INSERT INTO type2sig VALUES (2,3)") -c.execute("INSERT INTO type2sig VALUES (2,4)") -c.execute("INSERT INTO type2sig VALUES (2,5)") -c.execute("INSERT INTO type2sig VALUES (2,6)") -c.execute("INSERT INTO type2sig VALUES (2,7)") -c.execute("INSERT INTO type2sig VALUES (2,8)") -c.execute("INSERT INTO type2sig VALUES (2,9)") -c.execute("INSERT INTO type2sig VALUES (2,10)") -c.execute("INSERT INTO type2sig VALUES (2,11)") -c.execute("INSERT INTO type2sig VALUES (2,12)") -#c.execute("INSERT INTO type2sig VALUES (2,13)") - -c.execute("INSERT INTO type2sig VALUES (3,1)") -c.execute("INSERT INTO type2sig VALUES (3,2)") -c.execute("INSERT INTO type2sig VALUES (3,3)") -c.execute("INSERT INTO type2sig VALUES (3,4)") -c.execute("INSERT INTO type2sig VALUES (3,5)") -c.execute("INSERT INTO type2sig VALUES (3,6)") -c.execute("INSERT INTO type2sig VALUES (3,7)") -c.execute("INSERT INTO type2sig VALUES (3,8)") -c.execute("INSERT INTO type2sig VALUES (3,9)") -c.execute("INSERT INTO type2sig VALUES (3,10)") -c.execute("INSERT INTO type2sig VALUES (3,11)") -c.execute("INSERT INTO type2sig VALUES (3,12)") -#c.execute("INSERT INTO type2sig VALUES (3,13)") - -#c.execute("SELECT * FROM sig_config") -#r = c.fetchone() -#print r - -# Commit to the database the new entries and close the connection -conn.commit() -conn.close() diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/readConfigFromDB.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/readConfigFromDB.py deleted file mode 100644 index 6c5c1ed80ebc..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/readConfigFromDB.py +++ /dev/null @@ -1,213 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -def readSliceConfigFromDB(dbconnection, MCK, runtype, slice_name, reco_step, *vargs, **kwargs): - #this script returns config for a certain slice - # MCK can be: any MCK number - # 'ACTIVE_KEY' 
- # 'LAST_MCK' - - include("TrigHLTMonitoring/tools.py") - - if dbconnection=='oracle': - import cx_Oracle - import sys - conn = cx_Oracle.connect('tklimk/IiUTYhtOYL956!@devdb11') - c = conn.cursor() - - -# RunType=[] -# ConfigId=[] -# ConfigName=[] -# ConfigRecostep=[] -# ConfigCode=[] -# ConfigCodeHash=[] -# ConfigScript=[] -# ConfigScriptHash=[] - - if MCK=='ACTIVE_KEY': - current_MCK=getActiveKeyMCK('oracle') - elif MCK=='LAST_MCK': - current_MCK=getMaxMCK('oracle') - else: - current_MCK=MCK - - - # we have to get CLOBS separately, when fetchall they get spoiled... known problem with cx_Oracle - c.execute("SELECT sc_data FROM master_table, mt2type, type_table, type2sig, sig_config WHERE mt_id=:my_MCK and type2sig.sig_config_id=sig_config.sc_id and type2sig.type_id2=type_table.tt_id and mt2type.type_id1=type_table.tt_id and mt2type.master_id=:my_MCK and type_table.tt_name=:my_runtype and sig_config.sc_name=:my_slice_name and sig_config.recostep=:my_reco_step", my_MCK=current_MCK, my_runtype=runtype, my_slice_name=slice_name, my_reco_step=reco_step) - - for value, in c: - #print "Doing some manipulation with value", value.read() - #ConfigCode.append(eval(str(value))) - ConfigCode=eval(str(value)) - - return ConfigCode - - -def readAllConfigFromDB(dbconnection, MCK, *vargs, **kwargs): - #this script returns configs, run_type, slice_name, reco_step for all slices - # MCK can be: any MCK number - # 'ACTIVE_KEY' - # 'LAST_MCK' - - include("tools.py") - - if dbconnection=='oracle': - import cx_Oracle - import sys - conn = cx_Oracle.connect('tklimk/IiUTYhtOYL956!@devdb11') - c = conn.cursor() - - - RunType=[] - ConfigId=[] - ConfigName=[] - ConfigRecostep=[] - ConfigCode=[] - ConfigCodeHash=[] - ConfigScript=[] - ConfigScriptHash=[] - - if MCK=='ACTIVE_KEY': - current_MCK=getActiveKeyMCK('oracle') - elif MCK=='LAST_MCK': - current_MCK=getMaxMCK('oracle') - else: - current_MCK=MCK - - - # we have to get CLOBS separately, when fetchall they get spoiled... 
known problem with cx_Oracle - - c.execute("SELECT sc_data FROM master_table, mt2type, type_table, type2sig, sig_config WHERE mt_id=:my_MCK and type2sig.sig_config_id=sig_config.sc_id and type2sig.type_id2=type_table.tt_id and mt2type.type_id1=type_table.tt_id and mt2type.master_id=:my_MCK", my_MCK=current_MCK) - - for value, in c: - #print "Doing some manipulation with value", value.read() - #ConfigCode.append(eval(str(value))) - ConfigCode.append(eval(str(value))) - - c.execute("SELECT sc_script FROM master_table, mt2type, type_table, type2sig, sig_config WHERE mt_id=:my_MCK and type2sig.sig_config_id=sig_config.sc_id and type2sig.type_id2=type_table.tt_id and mt2type.type_id1=type_table.tt_id and mt2type.master_id=:my_MCK", my_MCK=current_MCK) - - - for script_value, in c: - #print "Doing some manipulation with value", value.read() - ConfigScript.append(str(script_value)) - - - c.execute("SELECT * FROM master_table, mt2type, type_table, type2sig, sig_config WHERE mt_id=:my_MCK and type2sig.sig_config_id=sig_config.sc_id and type2sig.type_id2=type_table.tt_id and mt2type.type_id1=type_table.tt_id and mt2type.master_id=:my_MCK", my_MCK=current_MCK) - - - for x in c.fetchall(): - #print "Printing...", x - RunType.append(x[6]) - ConfigId.append(x[9]) - ConfigName.append(x[10]) - ConfigRecostep.append(x[11]) - #tkConfigCode.append(eval(str(x[12]))) - #print "here! ", type(x[12]) - #ConfigCode.append(eval(str(value))) - #ConfigScript.append(str(x[13])) - ConfigCodeHash.append(x[14]) - ConfigScriptHash.append(x[15]) - - - return ConfigCode, RunType, ConfigName, ConfigRecostep - - - - - - - - - -################################################### - -# -# -# -# -# -# -# -# -# runType=[] -# configId=[] -# configName=[] -# configRecostep=[] -# configCode=[] -# configScript=[] -# configCodeHash=[] -# configScriptHash=[] -# -# # we have to get CLOBS separately, when fetchall they get spoiled... 
known problem with cx_Oracle -# c.execute("SELECT sc_data FROM master_table, mt2type, type_table, type2sig, sig_config WHERE mt_id=:my_MCK and type2sig.sig_conf\ -#ig_id=sig_config.sc_id and type2sig.type_id2=type_table.tt_id and mt2type.type_id1=type_table.tt_id and mt2type.master_id=:my_MCK", my\ -#_MCK=MCK) -# -# for value, in c: -# #print "Doing some manipulation with value", value.read() -# ConfigCode.append(eval(str(value))) -# -# c.execute("SELECT sc_script FROM master_table, mt2type, type_table, type2sig, sig_config WHERE mt_id=:my_MCK and type2sig.sig_co\ -#nfig_id=sig_config.sc_id and type2sig.type_id2=type_table.tt_id and mt2type.type_id1=type_table.tt_id and mt2type.master_id=:my_MCK", \ -#my_MCK=MCK) -# -# for script_value, in c: -# #print "Doing some manipulation with value", value.read() -# ConfigScript.append(str(script_value)) -# -# -# c.execute("SELECT * FROM master_table, mt2type, type_table, type2sig, sig_config WHERE mt_id=:my_MCK and type2sig.sig_config_id=\ -#sig_config.sc_id and type2sig.type_id2=type_table.tt_id and mt2type.type_id1=type_table.tt_id and mt2type.master_id=:my_MCK", my_MCK=M\ -#CK) -# -# -# -# -# runType,configId,configName,configRecostep,configCode,configScript,configCodeHash,configScriptHash=getConfigForMCK('oracle', MCK) \ -# -# for index,conf_name in enumerate(configName): -# if runType[index]==runtype and conf_name==slice_name and configRecostep[index]==reco_step: -# return configId[index], configCode[index], configScript[index],configCodeHash[index],configScriptHash[index] -# #else: -# # print "No such configuration in the active key configuration!" 
-# - - - -#def readConfigFromDB(dbconnection, masterkey, runtype, slice_name, reco_step, *vargs, **kwargs): -## if dbconnection=='sqlite': -## import sqlite3 -## conn = sqlite3.connect('test.db') -# if dbconnection=='oracle': -# import sys -# import cx_Oracle -# conn = cx_Oracle.connect('tklimk/IiUTYhtOYL956!@devdb11') -# c = conn.cursor() -# c.execute("SELECT * FROM master_table, mt2type, type_table, type2sig, sig_config WHERE type_table.tt_name=:my_runtype and type2sig.sig_config_id=sig_config.sc_id and type2sig.type_id2=type_table.tt_id and mt2type.master_id=:my_masterkey and mt2type.type_id1=type_table.tt_id and sig_config.sc_name=:my_slice_name and sig_config.recostep=:my_reco_step",my_runtype=runtype, my_masterkey=masterkey, my_slice_name=slice_name, my_reco_step=reco_step) -# #c.execute("SELECT * FROM master_table, mt2type, type_table, type2sig, sig_config WHERE type_table.tt_name=? and type2sig.sig_config_id=sig_config.sc_id and type2sig.type_id2=type_table.tt_id and mt2type.master_id=? and mt2type.type_id1=type_table.tt_id and sig_config.sc_name=? and sig_config.recostep=?", (runtype, masterkey, slice_name, reco_step)) -# #c.execute("SELECT * FROM type2sig") -# #print "la la ", runtype -# #c.execute("SELECT * FROM master_table, mt2type, type_table, type2sig, sig_config WHERE type_table.tt_name=:lala",lala=runtype) -# #c.execute("SELECT * FROM master_table, mt2type, type_table, type2sig, sig_config WHERE type_table.tt_name='Physics'") -# #ff = c.fetchall() -# #print "test ", ff -# -# config_list_of_dict = [] -# -# config_list = c.fetchall() -# -# if len(config_list)==0: -# print "No entry for this slice!" -# return 0 -# else: -# for x in config_list: -# print "Printing...", x -# stringConfig=str(x[12]) # this is string, not dictionary -# dictConfig=eval(stringConfig) # convert string to dictionary -# # exec montoolconfigCode -# print "All configuration info in the database for slice ", slice_name, " is the following:\n",dictConfig -# # print "length.. 
", len(montool_config) -# config_list_of_dict.append(dictConfig) -# return config_list_of_dict -# -# conn.close() diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/readDictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/readDictionary.py deleted file mode 100644 index 9e4dfe172363..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/readDictionary.py +++ /dev/null @@ -1,149 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -include("tools.py") - -def readDictionary(*vargs, **kwargs): - # read data from all dictionaries available - import sys - import hashlib - slice_list = ['TAU','EGAMMA','JET','BJET','BPHYS','MET','MINBIAS','MUON','IDTRK','IDJPSI','CALORAW','CALOESD'] - slice_match_pattern = ['TAU','EGAMMA','JET','BJET','BPHYS','MET','MINBIAS','MUON','T','JPSI','Calo','Calo'] - run_type_list = ['Physics', 'Standby', 'Cosmics'] - dictionaries=[] - - ConfigNameMissing=[] - ConfigTypeMissing=[] - ConfigRecostepMissing=[] - ConfigCodeMissing=[] - ConfigCodeHashMissing=[] - ConfigScriptMissing=[] - ConfigScriptHashMissing=[] - slice_match_pattern_list=[] - - - - for sl_index, sl in enumerate(slice_list): - for rtl in run_type_list: - dictpath = "dictionaries/" - my_dictionary_file=dictpath+sl+"_"+rtl+"_"+"dictionary.py" - #sys.path.insert(0, dictpath) - sys.path.insert(1, dictpath) # they say it is better to use 1, not 0 ... 
- #sys.path.append(dictpath) - #sys.path.append("dictionaries") - #import my_dictionary_file - #sys.path.append(os.path.abspath(my_dictionary_file)) - #print "here", my_dictionary_file - #print "non zero file ", is_non_zero_file(my_dictionary_file) - my_dictionary=sl+"_"+rtl+"_"+"dictionary" - #tmp = "dictionaries."+my_dictionary - my_dict=sl+"_"+rtl+"_"+"dict" - #print "la la ", my_dictionary - #print my_dict - #print ("import %s as %s" % (my_dictionary,my_dict)) - if is_non_zero_file(my_dictionary_file): - #exec "import %s as %s" % (my_dictionary,my_dict) - #for mod in sys.modules.values(): - # reload(mod) - #imp.reload(my_dictionary) - - # Now we need to delete imported module first because in python interactive mode it does not get updated imported file.. - if my_dictionary in sys.modules: - del(sys.modules[my_dictionary]) - exec "import %s as %s" % (my_dictionary,my_dict) - - my_dict=eval(my_dict) - #print "checking reading dictionary.. ", my_dict - #print my_dict.Slice - dictionaries.append(my_dict) - slice_match_pattern_list.append(slice_match_pattern[sl_index]) - else: - #print "Dictionary is empty or does not exist! We leave active key DB values for this dictionary" - print "Dictionary is empty or does not exist! We leave release values for this dictionary and put None to config code and config script of DB entries" - ConfigNameMissing.append(sl) - ConfigTypeMissing.append(rtl) - print "missing dict ", sl, " Type ", rtl - if sl.count("RAW"): - ConfigRecostepMissing.append('RAW') - else: - ConfigRecostepMissing.append('ESD') - #ConfigRecostepMissing.append('ESD') # put it for now, later have to change!!! 
- config_code_missing=None - #config_code_missing=0 - str_code_missing=str(config_code_missing) - ConfigCodeMissing.append(config_code_missing) - hash_code_missing=hashlib.md5(str_code_missing).hexdigest() - ConfigCodeHashMissing.append(hash_code_missing) - #config_script_missing=0 - config_script_missing=None - str_script_missing=str(config_script_missing) - ConfigScriptMissing.append(config_script_missing) - hash_script_missing=hashlib.md5(str_script_missing).hexdigest() - ConfigScriptHashMissing.append(hash_script_missing) - # put here dics from DB? Or pass them somehow to upload that we know which dics are they - #print dictionaries - - - ConfigName=[] - ConfigRunType=[] - ConfigRecostep=[] - ConfigCode=[] - ConfigCodeHash=[] - ConfigScript=[] - ConfigScriptHash=[] - - #dictionaries = [tau_dict,egamma_dict,jet_dict] - - for ndict, dict in enumerate(dictionaries): - print "dict ", dict.Slice, " Type ", dict.RunType - ConfigRunType.append(dict.RunType) - ConfigName.append(dict.Slice) - ConfigRecostep.append(dict.Recostep) - ConfigScript.append(dict.Script) - ConfigScriptHash.append(hashlib.md5(dict.Script).hexdigest()) - #print "hash script ", hashlib.md5(dict.Script).hexdigest() - d={} - d["MonitoringTriggers"]=dict.MonitoringTriggers - d["Config"]=dict.Config - d["PkgName"]=dict.PkgName - d["CreatorName"]=dict.CreatorName - #print "dict d ", d - #print ConfigName - #print ConfigRunType - #print ConfigRecostep - #print ConfigCode - - if validateDictionary(d,dict.Slice,slice_match_pattern_list[ndict]): # put tool_match_pattern[ndict] as a second argument - #print "HERE!!!!!!!" 
- #print isDictionaryLikeInRelease(d) - if isDictionaryLikeInRelease(d)==0: - ConfigCode.append(d) - d_str = str(d) - #print "String d: " , d_str - d_hash = hashlib.md5(d_str).hexdigest() - #print "hash ", d_hash - ConfigCodeHash.append(d_hash) - #return ConfigName,ConfigRecostep,MontoolConfigCode,MontoolConfigScript - elif isDictionaryLikeInRelease(d)==1: - # in this case we have all parameters like in release and therefore we put "None" to config in the DB - none_str = str(None) - ConfigCode.append(None) - none_str_hash=hashlib.md5(none_str).hexdigest() - ConfigCodeHash.append(none_str_hash) - else: - print "Check dictionaries!" - return 0 - - # append missing dictionaries to others - - for i, entry in enumerate(ConfigNameMissing): - ConfigName.append(entry) - ConfigRunType.append(ConfigTypeMissing[i]) - ConfigRecostep.append(ConfigRecostepMissing[i]) - ConfigCode.append(ConfigCodeMissing[i]) - ConfigCodeHash.append(ConfigCodeHashMissing[i]) - ConfigScript.append(ConfigScriptMissing[i]) - ConfigScriptHash.append(ConfigScriptHashMissing[i]) - - - #return ConfigName,ConfigRunType,ConfigRecostep,ConfigCode,ConfigCodeHash,ConfigScript,ConfigScriptHash,ConfigNameMissing,ConfigTypeMissing,ConfigRecostepMissing - return ConfigName,ConfigRunType,ConfigRecostep,ConfigCode,ConfigCodeHash,ConfigScript,ConfigScriptHash diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/requirements b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/requirements deleted file mode 100644 index 7a63496b7805..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/requirements +++ /dev/null @@ -1,47 +0,0 @@ -package TrigHLTMonitoring - -author Martin zur Nedden <nedden@mail.cern.ch> - -use AtlasPolicy AtlasPolicy-* -#use AtlasCLHEP AtlasCLHEP-* External -use GaudiInterface GaudiInterface-* External -use AthenaBaseComps AthenaBaseComps-* Control - -private -apply_tag ROOTGraphicsLibs -apply_tag ROOTMathLibs -end_private - -use StoreGate StoreGate-* Control -use 
AthenaMonitoring AthenaMonitoring-* Control - -use TrigSteeringEvent TrigSteeringEvent-* Trigger/TrigEvent -use TrigDecisionTool TrigDecisionTool-* Trigger/TrigAnalysis -#use TrigDecision TrigDecision-* Trigger/TrigEvent -use TrigConfigSvc TrigConfigSvc-* Trigger/TrigConfiguration - -use LumiBlockComps LumiBlockComps-* LumiBlock -#use LumiBlockData LumiBlockData-* LumiBlock -#use AtlasCOOL AtlasCOOL-* External -#use AtlasCORAL AtlasCORAL-* External -#use CoraCool CoraCool-* Database - - -private -use AthenaKernel AthenaKernel-* Control -use AtlasBoost AtlasBoost-* External -use AtlasROOT AtlasROOT-* External -use EventInfo EventInfo-* Event -use AtlasCORAL AtlasCORAL-* External -use AthenaPoolUtilities AthenaPoolUtilities-* Database/AthenaPOOL -end_private - - - -#use AnalysisTriggerEvent AnalysisTriggerEvent-* PhysicsAnalysis/AnalysisTrigger - - -apply_pattern declare_joboptions files="*.py" -apply_pattern declare_python_modules files="*.py" -apply_pattern dual_use_library files=*.cxx -apply_pattern component_library \ No newline at end of file diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/schema.sql b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/schema.sql deleted file mode 100644 index 12552f7a6be8..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/schema.sql +++ /dev/null @@ -1,40 +0,0 @@ -CREATE TABLE mck_table ( - mck_id NUMBER(10), - mck_default BOOLEAN, - mck_creator VARCHAR2(100), - mck_creation_date VARCHAR2(100), - mck_comment VARCHAR2(1000), - CONSTRAINT mck_pk PRIMARY KEY(mck_id), - CONSTRAINT mck_default_nn CHECK(mck_default IS NOT NULL) -); -CREATE TABLE smck_table ( - smck_id NUMBER(10), - smck_slice_type VARCHAR2(10), - smck_slice_version VARCHAR2(10), - smck_config CLOB, - smck_config_hash VARCHAR2(200), - smck_creator VARCHAR2(100), - smck_creation_date VARCHAR2(100), - smck_comment VARCHAR2(1000), - CONSTRAINT smck_pk PRIMARY KEY(smck_id), - CONSTRAINT smck_slice_type_nn CHECK(smck_slice_type IS NOT 
NULL), - CONSTRAINT smck_slice_version_u UNIQUE(smck_slice_version), - CONSTRAINT smck_slice_version_nn CHECK(smck_slice_version IS NOT NULL), - CONSTRAINT smck_config_nn CHECK(smck_config IS NOT NULL), - CONSTRAINT smck_config_hash_nn CHECK(smck_config_hash IS NOT NULL), -); -CREATE TABLE mck_to_smck_link ( - link_mck NUMBER(10), - link_smck NUMBER(10), - CONSTRAINT link_u UNIQUE(link_mck,link_smck), - CONSTRAINT link_mck_nn CHECK(link_mck IS NOT NULL), - CONSTRAINT link_mck_fk FOREIGN KEY(link_mck) REFERENCES mck_table(mck_id), - CONSTRAINT link_smck_nn CHECK(link_smck IS NOT NULL), - CONSTRAINT link_smck_fk FOREIGN KEY(link_smck) REFERENCES smck_table(smck_id), -); -CREATE INDEX mck_id_index ON mck_table(mck_id); -CREATE INDEX mck_default_index ON mck_table(mck_default); -CREATE INDEX smck_id_index ON smck_table(smck_id); -CREATE INDEX smck_config_hash_index ON smck_table(smck_config_hash); -CREATE INDEX link_mck_index ON mck_to_smck_link(link_mck); -CREATE INDEX link_smck_index ON mck_to_smck_link(link_smck); diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/smk2mck_del.sql b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/smk2mck_del.sql deleted file mode 100644 index e83e38f2e5a3..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/smk2mck_del.sql +++ /dev/null @@ -1,5 +0,0 @@ -DROP INDEX smk_ind; -DROP INDEX mck_ind; -ALTER TABLE smk2mck_table DROP CONSTRAINT smk2mck_pk; -DROP TABLE smk2mck_table; - diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/smk2mck_schema.sql b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/smk2mck_schema.sql deleted file mode 100644 index e1301fd4148e..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/smk2mck_schema.sql +++ /dev/null @@ -1,11 +0,0 @@ -CREATE TABLE smk2mck_table ( - smk2mck_id NUMBER(10), - smk2mck_name VARCHAR2(200), - smk2mck_SMK NUMBER(10), - smk2mck_MCK NUMBER(10), - smk2mck_comment VARCHAR2(2000), - CONSTRAINT smk2mck_pk PRIMARY KEY(smk2mck_id) 
-); -CREATE INDEX smk_ind ON smk2mck_table(smk2mck_SMK); -CREATE INDEX mck_ind ON smk2mck_table(smk2mck_MCK); - diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/tools.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/tools.py deleted file mode 100644 index 9ab327c6bc2b..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/tools.py +++ /dev/null @@ -1,1001 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -def is_non_zero_file(fpath): - import os - #return True if os.path.isfile(fpath) and os.path.getsize(fpath) > 0 else False - #return True if os.path.isfile(fpath) and os.stat(fpath).st_size > 0 else False - if os.path.exists(fpath): - fi=open(fpath) - if not fi.read(3): #avoid reading entire file. - print "Dictionary file is empty!" - fi.close() - return False - elif fi.readline().isspace() or fi.readline()=="" or fi.readline()==" " or fi.readline()=="\n" or fi.readline()=="\0": - print "Dictionary file contains only empty spaces!" - fi.close() - return False - else: - fi.close() - return True - else: - print "Dictionary file does not exist!" 
- return False - -def getMaxMCK(dbconnection, *vargs, **kwargs): - if dbconnection=='oracle': - import cx_Oracle - import sys - conn = cx_Oracle.connect('tklimk/IiUTYhtOYL956!@devdb11') - c = conn.cursor() - - c.execute("SELECT max(mt_id) FROM master_table") - - for x in c.fetchall(): - #print x[0] - max_id = x[0] - - conn.close() - return max_id - - -def getMaxTypeId(dbconnection, *vargs, **kwargs): - if dbconnection=='oracle': - import cx_Oracle - import sys - conn = cx_Oracle.connect('tklimk/IiUTYhtOYL956!@devdb11') - c = conn.cursor() - - c.execute("SELECT max(tt_id) FROM type_table") - - for x in c.fetchall(): - #print x[0] - max_id = x[0] - - conn.close() - return max_id - -def getMaxConfigId(dbconnection, *vargs, **kwargs): - if dbconnection=='oracle': - import cx_Oracle - import sys - conn = cx_Oracle.connect('tklimk/IiUTYhtOYL956!@devdb11') - c = conn.cursor() - - c.execute("SELECT max(sc_id) FROM sig_config") - - for x in c.fetchall(): - #print x[0] - max_id = x[0] - - conn.close() - return max_id - - -def getActiveKeyMCK(dbconnection, *vargs, **kwargs): - if dbconnection=='oracle': - import cx_Oracle - import sys - conn = cx_Oracle.connect('tklimk/IiUTYhtOYL956!@devdb11') - c = conn.cursor() - - # get first active key from COOL - # look at the table of correspondence COOL SMK <--> MCK - # return MCK - # for now let's return the last MCK in the table - # all before was not correct - # we take an active key MCK as the last entry in the table activeMCK_table - - #c.execute("SELECT max(mt_id) FROM master_table") - c.execute("SELECT max(activeMCK_id) FROM activeMCK_table") - - for x in c.fetchall(): - #print x[0] - max_active_MCK_ID = x[0] - - c.execute("SELECT activeMCK FROM activeMCK_table WHERE activeMCK_id=:my_MCK_id",my_MCK_id=max_active_MCK_ID) - for x in c.fetchall(): - #print x[0] - active_MCK=x[0] - - conn.close() - return active_MCK - -def getConfigForMCK(dbconnection, MCK, *vargs, **kwargs): - if dbconnection=='oracle': - import cx_Oracle - import 
sys - conn = cx_Oracle.connect('tklimk/IiUTYhtOYL956!@devdb11') - c = conn.cursor() - - - RunType=[] - ConfigId=[] - ConfigName=[] - ConfigRecostep=[] - ConfigCode=[] - ConfigCodeHash=[] - ConfigScript=[] - ConfigScriptHash=[] - - - # we have to get CLOBS separately, when fetchall they get spoiled... known problem with cx_Oracle - c.execute("SELECT sc_data FROM master_table, mt2type, type_table, type2sig, sig_config WHERE mt_id=:my_MCK and type2sig.sig_config_id=sig_config.sc_id and type2sig.type_id2=type_table.tt_id and mt2type.type_id1=type_table.tt_id and mt2type.master_id=:my_MCK", my_MCK=MCK) - - for value, in c: - #print "Doing some manipulation with value", value.read() - ConfigCode.append(eval(str(value))) - - #print "here 2" - - c.execute("SELECT sc_script FROM master_table, mt2type, type_table, type2sig, sig_config WHERE mt_id=:my_MCK and type2sig.sig_config_id=sig_config.sc_id and type2sig.type_id2=type_table.tt_id and mt2type.type_id1=type_table.tt_id and mt2type.master_id=:my_MCK", my_MCK=MCK) - - for script_value, in c: - ConfigScript.append(str(script_value)) - - - c.execute("SELECT * FROM master_table, mt2type, type_table, type2sig, sig_config WHERE mt_id=:my_MCK and type2sig.sig_config_id=sig_config.sc_id and type2sig.type_id2=type_table.tt_id and mt2type.type_id1=type_table.tt_id and mt2type.master_id=:my_MCK", my_MCK=MCK) - - - for x in c.fetchall(): - #print "Printing...", x - RunType.append(x[6]) - ConfigId.append(x[9]) - ConfigName.append(x[10]) - ConfigRecostep.append(x[11]) - ConfigCodeHash.append(x[14]) - ConfigScriptHash.append(x[15]) - - conn.close() - return RunType,ConfigId,ConfigName,ConfigRecostep,ConfigCode,ConfigScript,ConfigCodeHash,ConfigScriptHash - - -def getConfigCodeForMCK(dbconnection, MCK, runtype, slice_name, reco_step, *vargs, **kwargs): - runType=[] - configId=[] - configName=[] - configRecostep=[] - configCode=[] - configScript=[] - configCodeHash=[] - configScriptHash=[] - - 
runType,configId,configName,configRecostep,configCode,configScript,configCodeHash,configScriptHash=getConfigForMCK(dbconnection, MCK) - for index,conf_name in enumerate(configName): - - if runType[index]==runtype and conf_name==slice_name and configRecostep[index]==reco_step: - return configId[index], configCode[index], configScript[index],configCodeHash[index],configScriptHash[index] - #else: - # print "No such configuration in the active key configuration!" - -def getTypeIDForMCK(dbconnection, MCK, RunType, *vargs, **kwargs): - if dbconnection=='oracle': - import cx_Oracle - import sys - conn = cx_Oracle.connect('tklimk/IiUTYhtOYL956!@devdb11') - c = conn.cursor() - - TypeID=[] - runType=[] - - c.execute("SELECT * FROM master_table, mt2type, type_table, type2sig, sig_config WHERE mt_id=:my_MCK and type2sig.sig_config_id=sig_config.sc_id and type2sig.type_id2=type_table.tt_id and mt2type.type_id1=type_table.tt_id and mt2type.master_id=:my_MCK", my_MCK=MCK) - - - for x in c.fetchall(): - #print "Printing...", x - TypeID.append(x[5]) - runType.append(x[6]) - - conn.close() - - for i,type in enumerate(runType): - if type==RunType: - return TypeID[i] - - -def getAvailableTypeIDsForGivenConfig(dbconnection, config_id, run_type, *vargs, **kwargs): - if dbconnection=='oracle': - import cx_Oracle - import sys - conn = cx_Oracle.connect('tklimk/IiUTYhtOYL956!@devdb11') - c = conn.cursor() - - TypeID=[] - myTypeID=[] - runType=[] - - c.execute("SELECT * FROM type_table, type2sig, sig_config WHERE type2sig.sig_config_id=sig_config.sc_id and type2sig.type_id2=type_table.tt_id and sig_config.sc_id=:my_config_id", my_config_id=config_id) - - for x in c.fetchall(): - #print "Printing...", x - TypeID.append(x[0]) - runType.append(x[1]) - - for i,type in enumerate(runType): - if type==run_type: - myTypeID.append(TypeID[i]) - - if len(myTypeID)>0: - return myTypeID - else: - return 0 - - - -def getConfigIdForMCK(dbconnection, MCK, runtype, slice_name, reco_step, *vargs, **kwargs): 
- runType=[] - configId=[] - configName=[] - configRecostep=[] - configCode=[] - configScript=[] - configCodeHash=[] - configScriptHash=[] - - runType,configId,configName,configRecostep,configCode,configScript,configCodeHash,configScriptHash=getConfigForMCK(dbconnection, MCK) - for index,conf_name in enumerate(configName): - if runType[index]==runtype and conf_name==slice_name and configRecostep[index]==reco_step: - return configId[index] - #else: - # print "No such configuration in the active key configuration!" - - - -def equalConfigs(config1,config2,*vargs, **kwargs): - # 1 is dictionary; 2 is database - CHANGED_CONFIG=0 # 1 if at least 1 parameter changed, or if at least 1 new parameter for a certain tool was introduced (i.e. changed in comparison to the standard value), or if a new tool was introduced in dictionary which is not in the database - CHANGED_TRIGGERS=0 # 1 if monitoring triggers have changed - CHANGED_COMBINED=0 # 1 if any of ToolConfig or MonitoringTriggers has changed - - if config1==None or config2==None: - if config1==config2: - return 1 - else: - return 0 - else: - from AthenaCommon.AppMgr import ToolSvc - exec "from %s import %s" % (config1["PkgName"],config1["CreatorName"]) - exec "%s()" % (config1["CreatorName"]) - #print config1["PkgName"],config1["CreatorName"] - - - toolname_list2=[] - for (toolname2, toolconfig2) in config2['Config']: - toolname_list2.append(toolname2) - - for (toolname1, toolconfig1) in config1['Config']: - if toolname1 not in toolname_list2: - print "New tool in dictionary was introduced!" - CHANGED_CONFIG=1 - break - else: - for (toolname2, toolconfig2) in config2['Config']: - if toolname2==toolname1: - tool=getattr(ToolSvc,toolname1) - for (confattr1, confvalue1) in toolconfig1.items(): - if confattr1 not in toolconfig2: - print "New parameter for some tool was introduced!" 
- # we should check if such parameter exists for a certain tool - # if it exists then we should compare to the value of an attribute (if it is an attribute of a tool) or we should compare its values to default values (if it is not an attribute of a tool) - if hasattr(tool,confattr1): - if confvalue1==getattr(tool,confattr1): - CHANGED_CONFIG=0 - # here we put "None" in the config code of the DB if all values are like in the release or like default values (see isDictionaryLikeInRelease and readDictionaries) - continue - else: - CHANGED_CONFIG=1 - break - elif tool.getDefaultProperty(confattr1): - if confvalue1==tool.getDefaultProperty(confattr1): - CHANGED_CONFIG=0 - # here we should put "None" in the config code of the DB if all values are like in the release or like default values (see isDictionaryLikeInRelease and readDictionaries) - continue - else: - CHANGED_CONFIG=1 - break - else: - print "Newly introduced parameter for some tool does not exist! Although this situation should not occur because it was checked in validateDictionary(dictionary) already.." 
- CHANGED_CONFIG=1 - break - else: - if toolconfig2[confattr1]==confvalue1: - CHANGED_CONFIG=0 - continue - else: - # 1 if the configuration in 'Config' part has changed - CHANGED_CONFIG=1 - break - if CHANGED_CONFIG!=0: - break - if CHANGED_CONFIG!=0: - break - - - - # get monitoring triggers from config1 and config2 and sort them - monitoring_triggers1 = sorted(config1['MonitoringTriggers']) - monitoring_triggers2 = sorted(config2['MonitoringTriggers']) - - #print monitoring_triggers1 - #print monitoring_triggers2 - - if monitoring_triggers1==monitoring_triggers2: - CHANGED_TRIGGERS=0 - else: - CHANGED_TRIGGERS=1 - - CHANGED_COMBINED = CHANGED_CONFIG or CHANGED_TRIGGERS - #print "CHANGED_CONFIG ", CHANGED_CONFIG - #print "CHANGED_TRIGGERS ", CHANGED_TRIGGERS - return not(CHANGED_COMBINED) - - -def searchSimilarConfigsInDB(dbconnection, slice_name, reco_step, config_code, config_code_hash, *vargs, **kwargs): - # we search similar configs first according to hash code of config code (we don't take into account script comparison yet) - # then for these configs we compare slice_name, reco_step - # then for left config array we check all parameters by hand - # and for the left config array we return config ID of the last entry in the DB ( can happen that there are several identical entries in the DB, but, in principle, should not be the case!!! 
) - if dbconnection=='oracle': - import cx_Oracle - import sys - conn = cx_Oracle.connect('tklimk/IiUTYhtOYL956!@devdb11') - c = conn.cursor() - - #c.execute("SELECT * FROM master_table, mt2type, type_table, type2sig, sig_config WHERE mt_id=:my_MCK and type2sig.sig_config_id=sig_config.sc_id and type2sig.type_id2=type_table.tt_id and mt2type.type_id1=type_table.tt_id", my_MCK=MCK) - c.execute("SELECT * FROM sig_config WHERE sig_config.data_hash=:my_data_hash", my_data_hash=config_code_hash) - - - equal_hash_configs=[] - equal_hash_Ids=[] - - - for x in c.fetchall(): - #print "Printing...", x - if x[1]==slice_name and x[2]==reco_step: - equal_hash_configs.append(eval(str(x[3]))) - equal_hash_Ids.append(x[0]) - - #print equal_hash_configs - #print equal_hash_Ids - - # now we want to compare config parameters to be sure that they are equal. Although hasgh codes are the same it can happen that configs are not the same, probability of such a situation is very low.. - - equal_configs=[] - equal_Ids=[] - - if len(equal_hash_configs)!=0: - for i,db_config in enumerate(equal_hash_configs): - if equalConfigs(config_code,db_config): - equal_configs.append(db_config) - equal_Ids.append(equal_hash_Ids[i]) - - conn.close() - - #get config with the last Id - if len(equal_configs)!=0: - return equal_Ids[-1] - else: - return 0 - - -def printConfig(dbconnection, MCK, *vargs, **kwargs): - if dbconnection=='oracle': - import cx_Oracle - import sys - conn = cx_Oracle.connect('tklimk/IiUTYhtOYL956!@devdb11') - c = conn.cursor() - - if MCK=='ACTIVE_KEY': - current_MCK=getActiveKeyMCK('oracle') - elif MCK=='LAST_MCK': - current_MCK=getMaxMCK('oracle') - else: - current_MCK=MCK - - MasterKey=[] - MTconnect1=[] - MTconnect2=[] - TypeId=[] - RunType=[] - t2s_connect1=[] - t2s_connect2=[] - ConfigId=[] - ConfigName=[] - ConfigRecostep=[] - ConfigCode=[] - ConfigCodeHash=[] - ConfigScript=[] - ConfigScriptHash=[] - - c.execute("SELECT * FROM master_table, mt2type, type_table, type2sig, 
sig_config WHERE mt_id=:my_MCK and type2sig.sig_config_id=sig_config.sc_id and type2sig.type_id2=type_table.tt_id and mt2type.type_id1=type_table.tt_id and mt2type.master_id=:my_MCK", my_MCK=current_MCK) - - - for x in c.fetchall(): - #print "Printing...", x - MasterKey.append(x[0]) - MTconnect1.append(x[3]) - MTconnect2.append(x[4]) - TypeId.append(x[5]) - RunType.append(x[6]) - t2s_connect1.append(x[7]) - t2s_connect2.append(x[8]) - ConfigId.append(x[9]) - ConfigName.append(x[10]) - ConfigRecostep.append(x[11]) - ConfigCodeHash.append(x[14]) - ConfigScriptHash.append(x[15]) - - slice_list = ['TAU','EGAMMA','JET','BJET','BPHYS','MET','MINBIAS','MUON','IDTRK','IDJPSI','CALORAW','CALOESD'] - - print "MCK: ", MasterKey[0] - print " Physics: ", TypeId[RunType.index('Physics')] - for ii in slice_list: - for i, item in enumerate(ConfigId): - if RunType[i]=='Physics': - if ConfigName[i]==ii: - print ('{0:6} {1:10} {2:10}'.format(item,ii,ConfigRecostep[i])) - - print " Standby: ", TypeId[RunType.index('Standby')] - for ii in slice_list: - for i, item in enumerate(ConfigId): - if RunType[i]=='Standby': - if ConfigName[i]==ii: - print ('{0:6} {1:10} {2:10}'.format(item,ii,ConfigRecostep[i])) - - print " Cosmics: ", TypeId[RunType.index('Cosmics')] - for ii in slice_list: - for i, item in enumerate(ConfigId): - if RunType[i]=='Cosmics': - if ConfigName[i]==ii: - print ('{0:6} {1:10} {2:10}'.format(item,ii,ConfigRecostep[i])) - - conn.close() - return None - -def printAllConfig(dbconnection, MCK, *vargs, **kwargs): - if dbconnection=='oracle': - import cx_Oracle - import sys - conn = cx_Oracle.connect('tklimk/IiUTYhtOYL956!@devdb11') - c = conn.cursor() - - if MCK=='ACTIVE_KEY': - current_MCK=getActiveKeyMCK('oracle') - elif MCK=='LAST_MCK': - current_MCK=getMaxMCK('oracle') - else: - current_MCK=MCK - - MasterKey=[] - MTconnect1=[] - MTconnect2=[] - TypeId=[] - RunType=[] - t2s_connect1=[] - t2s_connect2=[] - ConfigId=[] - ConfigName=[] - ConfigRecostep=[] - ConfigCode=[] - 
ConfigCodeHash=[] - ConfigScript=[] - ConfigScriptHash=[] - - #maxMCK=getMaxMCK('oracle') - - - # we have to get CLOBS separately, when fetchall they get spoiled... known problem with cx_Oracle - c.execute("SELECT sc_data FROM master_table, mt2type, type_table, type2sig, sig_config WHERE mt_id=:my_MCK and type2sig.sig_config_id=sig_config.sc_id and type2sig.type_id2=type_table.tt_id and mt2type.type_id1=type_table.tt_id and mt2type.master_id=:my_MCK", my_MCK=current_MCK) - - for value, in c: - #print "Doing some manipulation with value", value.read() - ConfigCode.append(eval(str(value))) - - c.execute("SELECT sc_script FROM master_table, mt2type, type_table, type2sig, sig_config WHERE mt_id=:my_MCK and type2sig.sig_config_id=sig_config.sc_id and type2sig.type_id2=type_table.tt_id and mt2type.type_id1=type_table.tt_id and mt2type.master_id=:my_MCK", my_MCK=current_MCK) - - for script_value, in c: - ConfigScript.append(str(script_value)) - - - c.execute("SELECT * FROM master_table, mt2type, type_table, type2sig, sig_config WHERE mt_id=:my_MCK and type2sig.sig_config_id=sig_config.sc_id and type2sig.type_id2=type_table.tt_id and mt2type.type_id1=type_table.tt_id and mt2type.master_id=:my_MCK", my_MCK=current_MCK) - - - for x in c.fetchall(): - #print "Printing...", x - MasterKey.append(x[0]) - MTconnect1.append(x[3]) - MTconnect2.append(x[4]) - TypeId.append(x[5]) - RunType.append(x[6]) - t2s_connect1.append(x[7]) - t2s_connect2.append(x[8]) - ConfigId.append(x[9]) - ConfigName.append(x[10]) - ConfigRecostep.append(x[11]) - ConfigCodeHash.append(x[14]) - ConfigScriptHash.append(x[15]) - - - slice_list = ['TAU','EGAMMA','JET','BJET','BPHYS','MET','MINBIAS','MUON','IDTRK','IDJPSI','CALORAW','CALOESD'] - - print "MCK: ", MasterKey[0] - print "============" - print "Physics: ", TypeId[RunType.index('Physics')] - print "############" - - for ii in slice_list: - for i, item in enumerate(ConfigId): - if RunType[i]=='Physics': - if ConfigName[i]==ii: - print "===> 
",ConfigId[i], " ", ConfigName[i], " ", ConfigRecostep[i], " ", ConfigCode[i], " ", ConfigScript[i] - - print "Standby: ", TypeId[RunType.index('Standby')] - print "############" - - for ii in slice_list: - for i, item in enumerate(ConfigId): - if RunType[i]=='Standby': - if ConfigName[i]==ii: - print "===> ",ConfigId[i], " ", ConfigName[i], " ", ConfigRecostep[i], " ", ConfigCode[i], " ", ConfigScript[i] - - print "Cosmics: ", TypeId[RunType.index('Cosmics')] - print "############" - - for ii in slice_list: - for i, item in enumerate(ConfigId): - if RunType[i]=='Cosmics': - if ConfigName[i]==ii: - print "===> ",ConfigId[i], " ", ConfigName[i], " ", ConfigRecostep[i], " ", ConfigCode[i], " ", ConfigScript[i] - - conn.close() - return None - - -def printDB(dbconnection, *vargs, **kwargs): - if dbconnection=='oracle': - import cx_Oracle - import sys - conn = cx_Oracle.connect('tklimk/IiUTYhtOYL956!@devdb11') - c = conn.cursor() - - - MasterKey=[] - MTconnect1=[] - MTconnect2=[] - TypeId=[] - RunType=[] - t2s_connect1=[] - t2s_connect2=[] - ConfigId=[] - ConfigName=[] - ConfigRecostep=[] - ConfigCode=[] - ConfigCodeHash=[] - ConfigScript=[] - ConfigScriptHash=[] - - c.execute("SELECT * FROM master_table, mt2type, type_table, type2sig, sig_config WHERE mt2type.type_id1=type_table.tt_id and mt2type.master_id=mt_id and type2sig.sig_config_id=sig_config.sc_id and type2sig.type_id2=type_table.tt_id") - - for x in c.fetchall(): - #print "Printing...", x - MasterKey.append(x[0]) - TypeId.append(x[5]) - RunType.append(x[6]) - ConfigId.append(x[9]) - ConfigName.append(x[10]) - ConfigRecostep.append(x[11]) - - MCK_list=[] - TypeId_list=[] - - # values in MasterKey list are repeating - #we want to produce a list of unique values - for i,value in enumerate(MasterKey): - if value not in MCK_list: - MCK_list.append(value) - - - slice_list = ['TAU','EGAMMA','JET','BJET','BPHYS','MET','MINBIAS','MUON','IDTRK','IDJPSI','CALORAW','CALOESD'] - - - MCK_list=[] - - # values in MasterKey 
list are repeating - #we want to produce a list of unique values - - for i,value in enumerate(MasterKey): - if value not in MCK_list: - MCK_list.append(value) - - active_MCK=getActiveKeyMCK('oracle') - - physicsList=[] - standbyList=[] - cosmicsList=[] - - # get sorted lists of types - for mck in sorted(MCK_list): - for i,item in enumerate(TypeId): - if MasterKey[i]==mck: - if RunType[i]=='Physics': - physicsList.append(item) - break - - - for mck in sorted(MCK_list): - for i,item in enumerate(TypeId): - if MasterKey[i]==mck: - if RunType[i]=='Standby': - standbyList.append(item) - break - - for mck in sorted(MCK_list): - for i,item in enumerate(TypeId): - if MasterKey[i]==mck: - if RunType[i]=='Cosmics': - cosmicsList.append(item) - break - - #print physicsList - #print standbyList - #print cosmicsList - - for mck_index,mck in enumerate(sorted(MCK_list)): - if mck==active_MCK: - print "ACTIVE MCK: ",mck - print "===============" - else: - print "MCK: ",mck - print "=======" - print "Physics: ",physicsList[mck_index] - for ii in slice_list: - for i,item in enumerate(ConfigId): - if MasterKey[i]==mck: - if TypeId[i]==physicsList[mck_index]: - if ConfigName[i]==ii: - print ('{0:6} {1:10} {2:10}'.format(item,ConfigName[i],ConfigRecostep[i])) - - print "Standby: ",standbyList[mck_index] - for ii in slice_list: - for i,item in enumerate(ConfigId): - if MasterKey[i]==mck: - if TypeId[i]==standbyList[mck_index]: - if ConfigName[i]==ii: - print ('{0:6} {1:10} {2:10}'.format(item,ConfigName[i],ConfigRecostep[i])) - - print "Cosmics: ",cosmicsList[mck_index] - for ii in slice_list: - for i,item in enumerate(ConfigId): - if MasterKey[i]==mck: - if TypeId[i]==cosmicsList[mck_index]: - if ConfigName[i]==ii: - print ('{0:6} {1:10} {2:10}'.format(item,ConfigName[i],ConfigRecostep[i])) - - conn.close() - return None - - -def printMCKandTypes(dbconnection, *vargs, **kwargs): - if dbconnection=='oracle': - import cx_Oracle - import sys - conn = 
cx_Oracle.connect('tklimk/IiUTYhtOYL956!@devdb11') - c = conn.cursor() - - - MasterKey=[] - MTconnect1=[] - MTconnect2=[] - TypeId=[] - RunType=[] - t2s_connect1=[] - t2s_connect2=[] - ConfigId=[] - ConfigName=[] - ConfigRecostep=[] - ConfigCode=[] - ConfigCodeHash=[] - ConfigScript=[] - ConfigScriptHash=[] - - c.execute("SELECT * FROM master_table, mt2type, type_table WHERE mt2type.type_id1=type_table.tt_id and mt2type.master_id=mt_id") - - - for x in c.fetchall(): - #print "Printing...", x - MasterKey.append(x[0]) - TypeId.append(x[5]) - RunType.append(x[6]) - - MCK_list=[] - - # values in MasterKey list are repeating - #we want to produce a list of unique values - - for i,value in enumerate(MasterKey): - if value not in MCK_list: - MCK_list.append(value) - - active_MCK=getActiveKeyMCK('oracle') - - for mck in sorted(MCK_list): - if mck==active_MCK: - print "ACTIVE MCK: ",mck - else: - print "MCK: ",mck - for i,item in enumerate(MasterKey): - if item==mck: - print "Type ID: ",TypeId[i], " Run Type: ", RunType[i] - - conn.close() - return 0 - -def validateDictionary(dictionary, slice_name, slice_match_pattern, *vargs, **kwargs): - from AthenaCommon.AppMgr import ToolSvc - import re - # here we need to compare PkgName and CreatorName to patterns to be sure we take the right ones - if re.search(slice_match_pattern,dictionary["PkgName"],re.I) and re.search(slice_match_pattern,dictionary["CreatorName"],re.I): - pass - else: - print "PkgName or CreatorName are wrong! 
Look at ", dictionary["PkgName"], " ", dictionary["CreatorName"] - return 0 - - # because for Calo tools there are couple of flags defined in the release we have to consider CALO case separately to be able to call all tools for CALO for RAW and ESD steps: - - from AthenaMonitoring.DQMonFlags import DQMonFlags - - if slice_name=='CALORAW': - DQMonFlags.monManEnvironment = 'tier0Raw' - elif slice_name=='CALOESD': - DQMonFlags.monManEnvironment = 'tier0ESD' - else: - DQMonFlags.monManEnvironment = 'tier0ESD' - - exec "from %s import %s" % (dictionary["PkgName"],dictionary["CreatorName"]) - exec "%s()" % (dictionary["CreatorName"]) - #print dictionary["PkgName"],dictionary["CreatorName"] - for (toolname, toolconfig) in dictionary["Config"]: - #print "toolname ", toolname - #exec "hasattr(ToolSvc,%s)" % (toolname) - #print "HERE! ", hasattr(ToolSvc,toolname) - - # here we need to compare tool names to patterns to be sure we take the right ones - if re.search(slice_match_pattern,toolname,re.I): - pass - else: - print "Tool name is wrong! Look at ", dictionary["CreatorName"] - return 0 - if hasattr(ToolSvc,toolname): - tool=getattr(ToolSvc,toolname) - for confattr,confvalue in toolconfig.items(): - if hasattr(tool,confattr): - continue - elif tool.getDefaultProperty(confattr): - continue - else: - print "There is no parameter ", confattr, " in tool ", toolname, " Check the name of the tool parameter!" - return 0 - else: - print "There is no tool ", toolname, " in ToolSvc. Check the name of the tool!" 
- return 0 - - return 1 - - -def isDictionaryLikeInRelease(dictionary, *vargs, **kwargs): - # here we check if parameters in dictionary have values like in release or default values, if yes we return 1, else 0 - # we suppose that dictionary is already validated and all tool names and parameter names are correct and dictionary is not empty - from AthenaCommon.AppMgr import ToolSvc - exec "from %s import %s" % (dictionary["PkgName"],dictionary["CreatorName"]) - exec "%s()" % (dictionary["CreatorName"]) - #print dictionary["PkgName"],dictionary["CreatorName"] - for (toolname, toolconfig) in dictionary["Config"]: - if hasattr(ToolSvc,toolname): - tool=getattr(ToolSvc,toolname) - #if toolname=="HLTTauMon": - #print tool - for confattr,confvalue in toolconfig.items(): - if hasattr(tool,confattr): - #print "dictionary value ", toolname, confattr, confvalue - #print "release value", getattr(tool,confattr) - if getattr(tool,confattr)==confvalue: - continue - else: - return 0 - elif tool.getDefaultProperty(confattr): - #print "dictionary value ", toolname, confattr, confvalue - #print "default value", tool.getDefaultProperty(confattr) - if tool.getDefaultProperty(confattr)==confvalue: - continue - else: - return 0 -# else: -# print "There is no parameter ", confattr, " in tool ", toolname, " Check the name of the tool parameter!" -# return 0 -# else: -# print "There is no tool ", toolname, " in ToolSvc. Check the name of the tool!" 
-# return 0 - - return 1 - - - -def searchSimilarTypeIdInDB(dbconnection, run_type, configIdList, *vargs, **kwargs): - if dbconnection=='oracle': - import cx_Oracle - import sys - conn = cx_Oracle.connect('tklimk/IiUTYhtOYL956!@devdb11') - c = conn.cursor() - #print "configIdList", configIdList - - in_clause = ', '.join([':id%d' % x for x in xrange(len(configIdList))]) - - sql = 'SELECT * FROM master_table, mt2type, type_table, type2sig, sig_config WHERE sig_config.sc_id in (%s) and type2sig.sig_config_id=sig_config.sc_id and type2sig.type_id2=type_table.tt_id and mt2type.type_id1=type_table.tt_id and mt_id=mt2type.master_id' % in_clause - - c.execute(sql,configIdList) - - run_type_id_array=[] - slice_name_array=[] - config_id_array=[] - run_type_array=[] - - run_type_id_array_fortype=[] - slice_name_array_fortype=[] - config_id_array_fortype=[] - run_type_array_fortype=[] - - - for x in c.fetchall(): - #print "Printing...", x - if len(x)>0: - run_type_array.append(x[6]) - run_type_id_array.append(x[5]) - slice_name_array.append(x[10]) - config_id_array.append(x[9]) - else: - return 0 - - for i, rt in enumerate(run_type_array): - if rt==run_type: - run_type_array_fortype.append(rt) - slice_name_array_fortype.append(slice_name_array[i]) - config_id_array_fortype.append(config_id_array[i]) - run_type_id_array_fortype.append(run_type_id_array[i]) - - - #print "run_type_id_array ", run_type, " ", run_type_id_array_fortype - #print "run_type_array ", run_type, " ", run_type_array_fortype - #print "slice_name_array_fortype ", slice_name_array_fortype - #print "config_id_array ", config_id_array_fortype - - conn.close() - tmp_list=[] - - for i,value in enumerate(run_type_id_array_fortype): - if value not in tmp_list: - tmp_list.append(value) - - #print "tmp_list ", tmp_list - - # we got the list of all type IDs contained in run_type_id_array - - slice_list = ['TAU','EGAMMA','JET','BJET','BPHYS','MET','MINBIAS','MUON','IDTRK','IDJPSI','CALORAW','CALOESD'] - - 
array_of_my_slice_array=[] - - - for i in tmp_list: - # fill array of slice names for these type IDs - my_slice_array=[] - for ii, v1 in enumerate(run_type_id_array_fortype): - if v1==i: - my_slice_array.append(slice_name_array_fortype[ii]) - array_of_my_slice_array.append(my_slice_array) - #print " my_slice_array ", my_slice_array - - #print "slice list ", slice_list - for i, arr in enumerate(array_of_my_slice_array): - #print "arr.. ", arr - if sorted(slice_list)==sorted(arr): - #print "found similar type ID ", tmp_list[i] - return tmp_list[i] - - #print "no similar type.." - return 0 - -def getMCKforTypeID(dbconnection, run_type_id, *vargs, **kwargs): - if dbconnection=='oracle': - import cx_Oracle - import sys - conn = cx_Oracle.connect('tklimk/IiUTYhtOYL956!@devdb11') - c = conn.cursor() - - c.execute("SELECT * FROM master_table, mt2type, type_table WHERE tt_id=:my_run_type_id and mt2type.type_id1=type_table.tt_id and mt2type.master_id=mt_id", my_run_type_id=run_type_id) - - MCK_list=[] - for x in c.fetchall(): - MCK_list.append(x[0]) - - conn.close() - #print MCK_list - - if len(MCK_list)>0: - return MCK_list - else: - return 0 - - -def makeActive(dbconnection, MCK, *vargs, **kwargs): - if dbconnection=='oracle': - import cx_Oracle - import sys - conn = cx_Oracle.connect('tklimk/IiUTYhtOYL956!@devdb11') - c = conn.cursor() - - - # check that a certain MCK exists - c.execute("SELECT mt_id FROM master_table") - - all_MCK = [] - for x in c.fetchall(): - #print x[0] - all_MCK.append(x[0]) - #max_id = x[0] - - #print all_MCK - - # here we should check if such MCK number exists or if MCK number is max MCK number + 1 (we just cannot read here noncommitted numbers from the database. If new MCK number came, makeActive was called before we committed database with new MCK number..) - if MCK in all_MCK or MCK==max(all_MCK)+1: - print "Such MCK really exists!" - else: - print "Check MCK number! It does not exist!" 
- return 0 - - # now we want to check if this MCK is already active. If yes then we don't create new entry in the table - current_active_MCK=getActiveKeyMCK('oracle') - if MCK==current_active_MCK: - print "This MCK is already active!" - return 0 - - # get the last ID from the activeMCK_table: - c.execute("SELECT max(activeMCK_id) FROM activeMCK_table") - - for x in c.fetchall(): - #print x[0] - maxActiveID = x[0] - - nextActiveID=maxActiveID+1 - c.execute("INSERT INTO activeMCK_table VALUES ("+str(nextActiveID)+", 'activeMCK', "+str(MCK)+")") - print "New Active MCK is written into activeMCK_table: ID = ", nextActiveID, ", MCK = ",MCK - conn.commit() - conn.close() - - return 0 - -def printActiveMCKTable(dbconnection, *vargs, **kwargs): - if dbconnection=='oracle': - import cx_Oracle - import sys - conn = cx_Oracle.connect('tklimk/IiUTYhtOYL956!@devdb11') - c = conn.cursor() - - c.execute("SELECT * FROM activeMCK_table") - - activeMCK_id=[] - sorted_activeMCK_id=[] - activeMCK=[] - for x in c.fetchall(): - #print x - activeMCK_id.append(x[0]) - activeMCK.append(x[2]) - - - sorted_activeMCK_id=sorted(activeMCK_id) - - for i,value in enumerate(sorted_activeMCK_id): - for ii,val in enumerate(activeMCK_id): - if val==value: - print "MCK ID: ",val, " MCK: ", activeMCK[ii] - else: - continue - - return 0 diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/upload.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/upload.py deleted file mode 100644 index ced9d1d2588b..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/upload.py +++ /dev/null @@ -1,663 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -include("readDictionary.py") -include("tools.py") -import hashlib -import sys - -#def upload(active_flag): -#def upload(): -def upload(MakeActive='False'): - - # read dictionary with the following lists: - ConfigNameDict=[] - ConfigRunTypeDict=[] - ConfigRecostepDict=[] - ConfigCodeDict=[] - 
ConfigCodeHashDict=[] - ConfigScriptDict=[] - ConfigScriptHashDict=[] - ConfigNameMissingDict=[] - ConfigTypeMissingDict=[] - ConfigRecostepMissingDict=[] - # here we get an array for different slices - - if readDictionary()==0: - print "Something wrong with dictionaries. Check dictionaries!" - #sys.exit("Something wrong with dictionaries. Check dictionaries!") - return 0 - else: - ConfigNameDict,ConfigRunTypeDict,ConfigRecostepDict,ConfigCodeDict,ConfigCodeHashDict,ConfigScriptDict,ConfigScriptHashDict=readDictionary() - - # for missing or empty dictionaries we assume that topOption will get monitoring config from release. Therefore we fill here "None" for these configs and scripts - - # get active key MCK - - activeKeyMCK = getActiveKeyMCK('oracle') - print "active key MCK = ", activeKeyMCK - - # if OVERWRITE = 1 then we should create a new config entry in the DB or take the existing one but not the active one - OVERWRITE=0 - # if EQUAL_CONFIG = 0 then config from dictionary is different than in the active key configuration - EQUAL_CONFIG=[] - index_list=[] - dbConfigId_check=[] - - #print "ConfigNameDict", ConfigNameDict - #print "enumerate(ConfigNameDict)", enumerate(ConfigNameDict) - - for index,conf_name in enumerate(ConfigNameDict): - #print "#####################################" - #print "index", index - #print " dict slice, RunType ", conf_name, ConfigRunTypeDict[index] - #print "config code from dict ", ConfigCodeDict[index] - #print "dict hash ", ConfigCodeHashDict[index] - #print "ConfigRecostepDict[index] ", ConfigRecostepDict[index] - - # get active key conf and compare hash of config code: - #print "'oracle', activeKeyMCK, ConfigRunTypeDict[index], conf_name, ConfigRecostepDict[index]",'oracle', activeKeyMCK, ConfigRunTypeDict[index], conf_name, ConfigRecostepDict[index] - dbConfigId, dbConfigCode, dbConfigScript, dbConfigCodeHash, dbConfigScriptHash=getConfigCodeForMCK('oracle', activeKeyMCK, ConfigRunTypeDict[index], conf_name, 
ConfigRecostepDict[index]) - - dbConfigId_check.append(dbConfigId) - - #print "name ", conf_name - #print "code from DB ", dbConfigCode - #print "dict hash ", ConfigCodeHashDict[index] - #print "db hash ", dbConfigCodeHash - - if ConfigCodeHashDict[index]==dbConfigCodeHash: - # check all parameters to be sure - # print "equalConfigs = ", equalConfigs(ConfigCodeDict[index],dbConfigCode) - if equalConfigs(ConfigCodeDict[index],dbConfigCode): - EQUAL_CONFIG.append(1) - index_list.append(index) - #OVERWRITE=0 - else: - EQUAL_CONFIG.append(0) - index_list.append(index) - OVERWRITE=1 - else: - #print "This configuration from dictionary is new!" - EQUAL_CONFIG.append(0) - index_list.append(index) - OVERWRITE=1 - #break - -# print "dbConfigId_check ", dbConfigId_check - print "OVERWRITE ", OVERWRITE -# print "slice ", ConfigNameDict -# print "run type ", ConfigRunTypeDict - print "EQUAL_CONFIG ", EQUAL_CONFIG -# print "index list ", index_list - - uploaded_MCK=0 - -# if OVERWRITE=1: write new configuration into the database: search for every config hash the entries in the DB for which there is the same hash, then compare them to be sure and connect to those configs; otherwise create new config IDs for new configs; for already equal configs take the existing configID connection -# if OVERWRITE=0: leave DB like it is - -# in tools: search in DB for EQUAL_CONFIG = 0 configs with equal hash code, it can be several equal hash cosed, return array of them, then for every of them check if all parameters are equal. If all are equal take the last entry. If only one config found then take it. If none found then create a new config entry. 
-# create an array of sig_config IDs with rearranged numbers -# recreate connecting table type2sig -# create a new entry for MCK -# rewrite connecting table mt2type - - import cx_Oracle - conn = cx_Oracle.connect('tklimk/IiUTYhtOYL956!@devdb11') - c = conn.cursor() - - checkPhysics=0 - checkStandby=0 - checkCosmics=0 - - similar_config=[] - - physicsConfigID=[] - cosmicsConfigID=[] - standbyConfigID=[] - - CHECK_PHYSICS_LIST=[] - CHECK_COSMICS_LIST=[] - CHECK_STANDBY_LIST=[] - - physics_flags=[] - cosmics_flags=[] - standby_flags=[] - - # flags for: if all EQUAL_CONFIG values for a certain type=1 with OVERWRITE=1 (that means that some type has to be rewritten) then we leave active type ID for this type - - physics_leave_active_type=0 - cosmics_leave_active_type=0 - standby_leave_active_type=0 - - - if OVERWRITE==1: - - # now we need to check if we can take already existing type IDs for our configs - # this should not happen if all flags of EQUAL_CONFIG=1 (this is not the case if OVERWRITE=1) - # this should happen if for EQUAL_CONFIG=0 there is an equal config in the DB exists OR if EQUAL_CONFIG=1 - # introduce flag/function EQUAL_PHYSICS_TYPE, ... if there is a similar TYPE table in the DB. It returns the type_ID if exists and 0 if not. 
In the last case we create new entry for a certain type - - for i,flag in enumerate(EQUAL_CONFIG): - if ConfigRunTypeDict[i]=='Physics': - physics_flags.append(flag) - elif ConfigRunTypeDict[i]=='Cosmics': - cosmics_flags.append(flag) - elif ConfigRunTypeDict[i]=='Standby': - standby_flags.append(flag) - - if 0 in physics_flags: - physics_leave_active_type=0 - else: - physics_leave_active_type=1 - if 0 in cosmics_flags: - cosmics_leave_active_type=0 - else: - cosmics_leave_active_type=1 - if 0 in standby_flags: - standby_leave_active_type=0 - else: - standby_leave_active_type=1 - - print "physics_leave_active_type = ", physics_leave_active_type - print "cosmics_leave_active_type = ", cosmics_leave_active_type - print "standby_leave_active_type = ", standby_leave_active_type - - print "physics_flags ", physics_flags - print "cosmics_flags ", cosmics_flags - print "standby_flags ", standby_flags - - existing_physics_type=0 - existing_cosmics_type=0 - existing_standby_type=0 - - if physics_leave_active_type: - # here we should find out the ID of active Physics Type - existing_physics_type = getTypeIDForMCK('oracle', activeKeyMCK, 'Physics') - if standby_leave_active_type: - # here we should find out the ID of active Standby Type - existing_standby_type = getTypeIDForMCK('oracle', activeKeyMCK, 'Standby') - if cosmics_leave_active_type: - # here we should find out the ID of active Physics Type - existing_cosmics_type = getTypeIDForMCK('oracle', activeKeyMCK, 'Cosmics') - - - for i,flag in enumerate(EQUAL_CONFIG): - similar_config.append(searchSimilarConfigsInDB('oracle', ConfigNameDict[i], ConfigRecostepDict[i], ConfigCodeDict[i], ConfigCodeHashDict[i])) - - if ConfigRunTypeDict[i]=='Physics': - if ((flag==0 and similar_config[-1]!=0) or flag==1) and physics_leave_active_type==0: - #CHECK_PHYSICS_LIST.append(1) - if flag==0 and similar_config[-1] != 0: - # now we need to find out which type IDs are connected to this config and check the size of this list of IDs (we 
check if it does not return 0) - if getAvailableTypeIDsForGivenConfig('oracle', similar_config[-1], 'Physics'): - physicsConfigID.append(similar_config[-1]) - CHECK_PHYSICS_LIST.append(1) - else: - CHECK_PHYSICS_LIST.append(0) - elif flag==1: - # we take number from active MCK - # put config ID from active config - physicsConfigID.append(getConfigIdForMCK('oracle', activeKeyMCK, 'Physics', ConfigNameDict[i], ConfigRecostepDict[i])) - CHECK_PHYSICS_LIST.append(1) - elif similar_config[-1]==0: - CHECK_PHYSICS_LIST.append(0) - - if ConfigRunTypeDict[i]=='Standby': - if ((flag==0 and similar_config[-1]!=0) or flag==1) and standby_leave_active_type==0: - #CHECK_STANDBY_LIST.append(1) - if flag==0 and similar_config[-1] != 0: - # now we need to find out which type IDs are connected to this config and check the size of this list of IDs (we check if it does not return 0) - if getAvailableTypeIDsForGivenConfig('oracle', similar_config[-1], 'Standby'): - standbyConfigID.append(similar_config[-1]) - CHECK_STANDBY_LIST.append(1) - else: - CHECK_STANDBY_LIST.append(0) - elif flag==1: - # we take number from active MCK - # put config ID from active config - standbyConfigID.append(getConfigIdForMCK('oracle', activeKeyMCK, 'Standby', ConfigNameDict[i], ConfigRecostepDict[i])) - CHECK_STANDBY_LIST.append(1) - elif similar_config[-1]==0: - CHECK_STANDBY_LIST.append(0) - - if ConfigRunTypeDict[i]=='Cosmics': - if ((flag==0 and similar_config[-1]!=0) or flag==1) and cosmics_leave_active_type==0: - #CHECK_COSMICS_LIST.append(1) - if flag==0 and similar_config[-1] != 0: - # now we need to find out which type IDs are connected to this config and check the size of this list of IDs (we check if it does not return 0) - if getAvailableTypeIDsForGivenConfig('oracle', similar_config[-1], 'Cosmics'): - cosmicsConfigID.append(similar_config[-1]) - CHECK_COSMICS_LIST.append(1) - else: - CHECK_COSMICS_LIST.append(0) - elif flag==1: - # we take number from active MCK - # put config ID from active 
config - cosmicsConfigID.append(getConfigIdForMCK('oracle', activeKeyMCK, 'Cosmics', ConfigNameDict[i], ConfigRecostepDict[i])) - CHECK_COSMICS_LIST.append(1) - elif similar_config[-1]==0: - CHECK_COSMICS_LIST.append(0) - -####################################################### - # here we check if separates configs for a certain slice have the same values. If we will need later to create new entries in the DB we will have to write them with the same new ID.. - - list_of_flag_list=[] - list_of_similar_config_list=[] - list_of_equal_config_list=[] - list_of_hash_list=[] - list_of_type_list=[] - - slice_list = ['TAU','EGAMMA','JET','BJET','BPHYS','MET','MINBIAS','MUON','IDTRK','IDJPSI','CALORAW','CALOESD'] - for sl in slice_list: - flag_list={} - similar_config_list={} - hash_list={} - for i,flag in enumerate(EQUAL_CONFIG): - if ConfigNameDict[i]==sl: - if ConfigRunTypeDict[i]=='Physics': - flag_list['Physics']=flag - similar_config_list['Physics']=similar_config[i] - hash_list['Physics']=ConfigCodeHashDict[i] - elif ConfigRunTypeDict[i]=='Standby': - flag_list['Standby']=flag - similar_config_list['Standby']=similar_config[i] - hash_list['Standby']=ConfigCodeHashDict[i] - elif ConfigRunTypeDict[i]=='Cosmics': - flag_list['Cosmics']=flag - similar_config_list['Cosmics']=similar_config[i] - hash_list['Cosmics']=ConfigCodeHashDict[i] - - list_of_type_list.append(flag_list.keys()) - list_of_flag_list.append(flag_list.values()) - list_of_similar_config_list.append(similar_config_list.values()) - list_of_hash_list.append(hash_list.values()) - - # now search for equal configs from dictionaries - for ii in list_of_hash_list: - equal_config_list={} - if ii[0]==ii[1] and ii[0]!=ii[2]: - equal_config_list=[1,1,0] - elif ii[0]!=ii[1] and ii[0]==ii[2]: - equal_config_list=[1,0,1] - elif ii[0]!=ii[1] and ii[0]!=ii[2]: - equal_config_list=[0,0,0] - elif ii[0]==ii[1] and ii[0]==ii[2]: - equal_config_list=[1,1,1] - list_of_equal_config_list.append(equal_config_list) - - #print 
"list_of_type_list ",list_of_type_list - #print "list_of_equal_config_list ", list_of_equal_config_list - #print "list_of_flag_list", list_of_flag_list - #print "list_of_similar_config_list", list_of_similar_config_list - #print "list_of_hash_list", list_of_hash_list - - list_of_leave_same_config_id=[] - for eqconfig_index,eqconfig_entry in enumerate(list_of_equal_config_list): - leave_same_config_id=[] - if 1 not in eqconfig_entry: - for index,entry in enumerate(eqconfig_entry): - #if list_of_flag_list[eqconfig_index][index]==0 and list_of_similar_config_list[eqconfig_index][index]==0: - leave_same_config_id.append(0) - #else: - # leave_same_config_id.append(0) - else: - for index,entry in enumerate(eqconfig_entry): - if entry==1 and list_of_flag_list[eqconfig_index][index]==0 and list_of_similar_config_list[eqconfig_index][index]==0: - leave_same_config_id.append(1) - else: - leave_same_config_id.append(0) - - - list_of_leave_same_config_id.append(leave_same_config_id) - - - print "list_of_leave_same_config_id ", list_of_leave_same_config_id - - #we are going to create a list of config IDs... 
- - list_of_id_increment=[] - list_of_repeated_id=[] - for ii in list_of_leave_same_config_id: - id_increment=[] - repeated_id=[] - if ii==[1,0,1]: - id_increment=[0,0,1] - repeated_id=[1,0,0] - elif ii==[1,1,0]: - id_increment=[1,0,0] - repeated_id=[0,1,0] - elif ii==[0,1,1]: - id_increment=[0,0,1] - repeated_id=[0,1,0] - elif ii==[1,1,1]: - id_increment=[0,0,1] - repeated_id=[1,1,0] - else: - id_increment=[0,0,0] - repeated_id=[0,0,0] - - - list_of_id_increment.append(id_increment) - list_of_repeated_id.append(repeated_id) - - #print "list_of_id_increment before adding", list_of_id_increment - #print "list_of_repeated_id ", list_of_repeated_id - - # now we should take into account increments for those configs which are not parts of equal dictionary configs (when entries in list_of_leave_same_config_id are 0 and flag = 0 and similar_configs=0) - - - for ii,val in enumerate(list_of_leave_same_config_id): - for ind,entry in enumerate(val): - #print "entry ", entry - #print "list_of_flag_list[ii][ind] ", list_of_flag_list[ii][ind] - #print "list_of_similar_config_list[ii][ind] ",list_of_similar_config_list[ii][ind] - if entry==0 and list_of_flag_list[ii][ind]==0 and list_of_similar_config_list[ii][ind]==0: - list_of_id_increment[ii][ind]=1 - - #print "list_of_id_increment after adding", list_of_id_increment - - # now we need to sort list_of_id_increment back according to EQUAL_CONFIG - - list_of_id_increment_sorted=[] - list_of_repeated_id_sorted=[] - for i,flag in enumerate(EQUAL_CONFIG): - for ii,val in enumerate(list_of_id_increment): - for tt,ttval in enumerate(val): - if slice_list[ii]==ConfigNameDict[i] and list_of_type_list[ii][tt]==ConfigRunTypeDict[i]: - list_of_id_increment_sorted.append(ttval) - for ii,val in enumerate(list_of_repeated_id): - for tt,ttval in enumerate(val): - if slice_list[ii]==ConfigNameDict[i] and list_of_type_list[ii][tt]==ConfigRunTypeDict[i]: - list_of_repeated_id_sorted.append(ttval) - - print "list_of_id_increment_sorted 
",list_of_id_increment_sorted - print "list_of_repeated_id_sorted ",list_of_repeated_id_sorted - - -########################################################### - - print "CHECK_PHYSICS_LIST ", CHECK_PHYSICS_LIST - print "CHECK_COSMICS_LIST ", CHECK_COSMICS_LIST - print "CHECK_STANDBY_LIST ", CHECK_STANDBY_LIST - - if len(CHECK_PHYSICS_LIST)>0: - if 0 in CHECK_PHYSICS_LIST: - checkPhysics=0 - else: - checkPhysics=1 - else: - checkPhysics=0 - if len(CHECK_COSMICS_LIST)>0: - if 0 in CHECK_COSMICS_LIST: - checkCosmics=0 - else: - checkCosmics=1 - else: - checkCosmics=0 - if len(CHECK_STANDBY_LIST)>0: - if 0 in CHECK_STANDBY_LIST: - checkStandby=0 - #EQUAL_STANDBY_TYPE=0 - else: - checkStandby=1 - else: - checkStandby=0 - - # now we want to check if there are similar whole types in DB available - - print "checkPhysics ", checkPhysics - print "checkCosmics ", checkCosmics - print "checkStandby ", checkStandby - - #print "physicsConfigID ", physicsConfigID - #print "cosmicsConfigID ", cosmicsConfigID - #print "standbyConfigID ", standbyConfigID - - equal_cosmics_type=0 - equal_physics_type=0 - equal_standby_type=0 - - if checkPhysics==1: - #print "we are here!" - existing_physics_type=searchSimilarTypeIdInDB('oracle', 'Physics', physicsConfigID) - if existing_physics_type: - equal_physics_type=1 - else: - equal_physics_type=0 - - if checkCosmics==1: - #print "we are here!" - existing_cosmics_type=searchSimilarTypeIdInDB('oracle', 'Cosmics', cosmicsConfigID) - if existing_cosmics_type: - equal_cosmics_type=1 - else: - equal_cosmics_type=0 - - - if checkStandby==1: - #print "we are here!" 
- existing_standby_type=searchSimilarTypeIdInDB('oracle', 'Standby', standbyConfigID) - if existing_standby_type: - equal_standby_type=1 - else: - equal_standby_type=0 - - EQUAL_PHYSICS_TYPE=(physics_leave_active_type or equal_physics_type) - EQUAL_STANDBY_TYPE=(standby_leave_active_type or equal_standby_type) - EQUAL_COSMICS_TYPE=(cosmics_leave_active_type or equal_cosmics_type) - # if 1 take existing_physics_type as a type ID - - print "EQUAL_PHYSICS_TYPE ", EQUAL_PHYSICS_TYPE - print "EQUAL_STANDBY_TYPE ", EQUAL_STANDBY_TYPE - print "EQUAL_COSMICS_TYPE ", EQUAL_COSMICS_TYPE - - physics_MCK_list = [] - cosmics_MCK_list = [] - standby_MCK_list = [] - - getMCKforTypeID('oracle', existing_physics_type) - - take_existing_MCK=0 - existing_MCK_ID=0 - if EQUAL_PHYSICS_TYPE==1 and EQUAL_COSMICS_TYPE==1 and EQUAL_STANDBY_TYPE==1: - - physics_MCK_list=getMCKforTypeID('oracle', existing_physics_type) - cosmics_MCK_list=getMCKforTypeID('oracle', existing_cosmics_type) - standby_MCK_list=getMCKforTypeID('oracle', existing_standby_type) - print "physics_MCK_list ", physics_MCK_list - print "cosmics_MCK_list ", cosmics_MCK_list - print "standby_MCK_list ", standby_MCK_list - - # now we want to find if these 2 lists have the same MCK value (all 3 lists, not 2). Therefore we first find if 2 lists have the same value and then we search for this value in 3rd list - duplicate_exists=0 - duplicates=[] - for a1 in physics_MCK_list: - if a1 in cosmics_MCK_list: - duplicates.append(a1) - duplicate_exists=1 - - #print "duplicate_exists = ", duplicate_exists - take_existing_MCK=0 - if duplicate_exists==1: - for dup in duplicates: - if dup in standby_MCK_list: - take_existing_MCK=1 - existing_MCK_ID=dup - - - - if take_existing_MCK==1: - #we make existing MCK as an active MCK. 
if there is such MCK available leave its number as an active combination (return MCK from upload) - uploaded_MCK=existing_MCK_ID - print "we take existing MCK number ", uploaded_MCK - # because this existing MCK was not active in the case if MakeActive=='True' we need to add a an entry to the table with active MCKs to indacate that there is a new active MCK: - if MakeActive=='True': - makeActive('oracle',uploaded_MCK) - - else: - print "Uploaded configuration is one of the existing configurations in the database but it is different than the active configuration. Because you have chosen not to make an uploaded configuration active we leave active MCK number not changed" - print "Your uploaded MCK: ", uploaded_MCK, " Current active MCK: ", activeKeyMCK - else: - # else create a new MCK - - new_MCK=getMaxMCK('oracle')+1 - uploaded_MCK=new_MCK - c.execute("INSERT INTO master_table VALUES ("+str(new_MCK)+", 'MCK', 'Monitoring Configuration Key')") - # here we fill table activeMCK_table if there is a new MCK and 'True' flag for MakeActive - - if MakeActive=='True': - makeActive('oracle',uploaded_MCK) - else: - print "Uploaded configuration is new and it gets a new MCK number. But because you have chosen not to make it active we leave active MCK number not changed." - print "Your uploaded MCK: ", uploaded_MCK, " Current active MCK: ", activeKeyMCK - - if EQUAL_PHYSICS_TYPE==1 and take_existing_MCK==0: - # rewrite conections - # otherwise (part down takes care of it) - #print "HERE!!!! existing physics type..", existing_physics_type - c.execute("INSERT INTO mt2type VALUES ("+str(new_MCK)+","+str(existing_physics_type)+")") - if EQUAL_COSMICS_TYPE==1 and take_existing_MCK==0: - #print "HERE!!!! existing cosmic type..", existing_cosmics_type - c.execute("INSERT INTO mt2type VALUES ("+str(new_MCK)+","+str(existing_cosmics_type)+")") - if EQUAL_STANDBY_TYPE==1 and take_existing_MCK==0: - #print "HERE!!!! 
existing standby type..", existing_standby_type - c.execute("INSERT INTO mt2type VALUES ("+str(new_MCK)+","+str(existing_standby_type)+")") - - - #print "HERE!!!! physics type..", existing_physics_type - - - # find out which RunTypes have to be changed: - maxTypeId=getMaxTypeId('oracle') - - # if these flags =1 then we should create new entries for these run types in the DB - changePhysics=0 - changeStandby=0 - changeCosmics=0 - - #for i,flag in enumerate(EQUAL_CONFIG): - #if flag==0 and ConfigRunTypeDict[i]=='Physics' and EQUAL_PHYSICS_TYPE==0 and take_existing_MCK==0: - if EQUAL_PHYSICS_TYPE==0 and take_existing_MCK==0: - changePhysics=1 - if EQUAL_STANDBY_TYPE==0 and take_existing_MCK==0: - changeStandby=1 - if EQUAL_COSMICS_TYPE==0 and take_existing_MCK==0: - changeCosmics=1 - - - # assume that we have only unique entries in the config DB - # if there are some entries with equal config for a certain type or if there are new entries for a certain type then we don't search for similar types in the DB - # only if for a certain type there are all entries new and they exist in the DB we check if they belong to one existing type Id and we take it over. If this is the case for all types then we take over the whole MCK - # we fill flags checkPhysics, checkStandby, checkCosmics: if they are 1, we use them later.. 
- - - # if we need to create new RunType entries: - # assign new Ids to type table entries - if changePhysics==1: - newPhysicsId=maxTypeId+1 - - if changeStandby==1 and changePhysics==1: - newStandbyId=newPhysicsId+1 - elif changeStandby==1 and changePhysics==0: - newStandbyId=maxTypeId+1 - - #if changeCosmics==1 and changeStandby==1 and (changePhysics==0 or changePhysics==1): - if changeCosmics==1 and changeStandby==1: - newCosmicsId=newStandbyId+1 - elif changeCosmics==1 and changeStandby==0 and changePhysics==1: - newCosmicsId=newPhysicsId+1 - elif changeCosmics==1 and changeStandby==0 and changePhysics==0: - newCosmicsId=maxTypeId+1 - - #print "new type Ids ", newPhysicsId, newStandbyId, newCosmicsId - - if changePhysics==1: - c.execute("INSERT INTO type_table VALUES ("+str(newPhysicsId)+",'Physics')") - c.execute("INSERT INTO mt2type VALUES ("+str(new_MCK)+","+str(newPhysicsId)+")") - if changeStandby==1: - c.execute("INSERT INTO type_table VALUES ("+str(newStandbyId)+",'Standby')") - c.execute("INSERT INTO mt2type VALUES ("+str(new_MCK)+","+str(newStandbyId)+")") - if changeCosmics==1: - c.execute("INSERT INTO type_table VALUES ("+str(newCosmicsId)+",'Cosmics')") - c.execute("INSERT INTO mt2type VALUES ("+str(new_MCK)+","+str(newCosmicsId)+")") - - sig_config_id = getMaxConfigId('oracle') - # now we produce a list of all config IDs - list_of_config_id=[] - for i in list_of_id_increment_sorted: - if i==1: - sig_config_id=sig_config_id+i - list_of_config_id.append(sig_config_id) - else: - list_of_config_id.append(i) - - - print "list of new config id before insertion of repetitions: ", list_of_config_id - - # loop over dictionaries - # here is the case when separate configs have to be changed - # if changePhysics==0 or changeCosmics==0 or changeStandby==0 - # i.e. 
when we don't take at least one of the whole existing types from DB - - if changePhysics==1 or changeCosmics==1 or changeStandby==1: - for i,flag in enumerate(EQUAL_CONFIG): - #similar_config=searchSimilarConfigsInDB('oracle', ConfigNameDict[i], ConfigRecostepDict[i], ConfigCodeDict[i], ConfigCodeHashDict[i]) - if flag==0 and similar_config[i]==0: - # there are no similar configs in the db --> create new config entries - # this case will go to sig_config table with the following IDs: - config_str_=str(ConfigCodeDict[i]) - config_str=config_str_.replace("'",'"') - #print "HERE!!! ", config_str - script_str_ = str(ConfigScriptDict[i]) - script_str = script_str_.replace("'",'"') - - #sig_config_id=sig_config_id+1 - #print "here new config id ", sig_config_id - if list_of_repeated_id_sorted[i]==0: - new_config_id=list_of_config_id[i] - c.execute("INSERT INTO sig_config VALUES ("+str(new_config_id)+",'"+ConfigNameDict[i]+"','"+ConfigRecostepDict[i]+"','"+config_str+"','"+script_str+"','"+ConfigCodeHashDict[i]+"','"+ConfigScriptHashDict[i]+"')") - elif list_of_repeated_id_sorted[i]==1 and list_of_repeated_id_sorted[i-1]==0 and list_of_id_increment_sorted==0: # if config is new but the same one as the previous one. Then we take previous id and don't create new entry, just reconnect.. - new_config_id=list_of_config_id[i-1] - elif list_of_repeated_id_sorted[i]==1 and list_of_repeated_id_sorted[i-1]==1 and list_of_repeated_id_sorted[i-2]==0 and list_of_id_increment_sorted==0: # if config is new but the same one as the preprevious one (all 3 configs are the same). Then we take preprevious id and don't create new entry, just reconnect.. 
- new_config_id=list_of_config_id[i-2] - - if ConfigRunTypeDict[i]=='Physics' and changePhysics==1: - c.execute("INSERT INTO type2sig VALUES ("+str(newPhysicsId)+","+str(new_config_id)+")") - elif ConfigRunTypeDict[i]=='Standby' and changeStandby==1: - c.execute("INSERT INTO type2sig VALUES ("+str(newStandbyId)+","+str(new_config_id)+")") - elif ConfigRunTypeDict[i]=='Cosmics' and changeCosmics==1: - c.execute("INSERT INTO type2sig VALUES ("+str(newCosmicsId)+","+str(new_config_id)+")") - elif flag==0 and similar_config[i]!=0: - #that means that there are similar configs in the db, we need just create new connections if there are no whole Type tables similar to our new one existent. This can be only in the case if all configs in the dictionaries for a certain slice are already available in the DB - - if ConfigRunTypeDict[i]=='Physics' and changePhysics==1: - c.execute("INSERT INTO type2sig VALUES ("+str(newPhysicsId)+","+str(similar_config[i])+")") - elif ConfigRunTypeDict[i]=='Standby' and changeStandby==1: - c.execute("INSERT INTO type2sig VALUES ("+str(newStandbyId)+","+str(similar_config[i])+")") - elif ConfigRunTypeDict[i]=='Cosmics' and changeCosmics==1: - c.execute("INSERT INTO type2sig VALUES ("+str(newCosmicsId)+","+str(similar_config[i])+")") - elif flag==1: - # these configurations didn't change in comparison to active key configurations - # leave conf ID from active key ID - # reconnect type table with sig_config table in the case if certain run type configuration is going to change - if changePhysics==1 and ConfigRunTypeDict[i]=='Physics': - equal_config_id = getConfigIdForMCK('oracle', activeKeyMCK, 'Physics', ConfigNameDict[i], ConfigRecostepDict[i]) - c.execute("INSERT INTO type2sig VALUES ("+str(newPhysicsId)+","+str(equal_config_id)+")") - elif changeStandby==1 and ConfigRunTypeDict[i]=='Standby': - equal_config_id = getConfigIdForMCK('oracle', activeKeyMCK, 'Standby', ConfigNameDict[i], ConfigRecostepDict[i]) - c.execute("INSERT INTO type2sig 
VALUES ("+str(newStandbyId)+","+str(equal_config_id)+")") - elif changeCosmics==1 and ConfigRunTypeDict[i]=='Cosmics': - equal_config_id = getConfigIdForMCK('oracle', activeKeyMCK, 'Cosmics', ConfigNameDict[i], ConfigRecostepDict[i]) - c.execute("INSERT INTO type2sig VALUES ("+str(newCosmicsId)+","+str(equal_config_id)+")") - - else: - uploaded_MCK=activeKeyMCK - print "Configuration hasn't changed, uploaded configuration is equal to the active configuration. We keep an old active MCK number" - print "Uploaded MCK == Current active MCK: ", uploaded_MCK - - - conn.commit() - conn.close() - - return uploaded_MCK - diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/java/GUI/TrigMaMGUI.java b/Trigger/TrigMonitoring/TrigHLTMonitoring/java/GUI/TrigMaMGUI.java index c1fa172db385..72a809f0c7d0 100644 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/java/GUI/TrigMaMGUI.java +++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/java/GUI/TrigMaMGUI.java @@ -1,17 +1,17 @@ //// -//// GUI for TriggerHLTMonitoring MenuAwareMonitoring +//// GUI for TriggerHLTMonitoring MenuAwareMonitoring //// built by Xanthe Hoad (xhoad@cern.ch) //// -import java.awt.*; -import java.awt.event.*; -import javax.swing.*; -import javax.swing.filechooser.*; +import java.awt.*; +import java.awt.event.*; +import javax.swing.*; +import javax.swing.filechooser.*; import javax.swing.JFileChooser; import javax.swing.SwingUtilities; import javax.swing.SwingWorker; import javax.swing.text.*; -import java.io.File; +import java.io.File; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; @@ -21,472 +21,589 @@ import java.io.Writer; import java.io.Reader; import java.util.concurrent.ExecutionException; import java.util.List; +import java.util.ArrayList; +import java.util.Collections; import java.lang.ProcessBuilder; +import javax.swing.event.ChangeEvent; +import javax.swing.event.ChangeListener; public class TrigMaMGUI extends JFrame { - - private int MCK_ID; - private int SMK_ID; - 
private int clone_MCK_ID; - private String MCK_SMK_link_comment; - private JTextArea GUI_messenger; - private String jsonFilename; - private File jsonSelectedFile; - private String SMCK_list; - private String clone_release; - private String clone_version; - - static public Boolean print_to_terminal; - - public TrigMaMGUI () { - - print_to_terminal = true; - - //Set up - setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); // Exit program if close-window button clicked - setTitle("Trigger MaM User Interface"); // "this" JFrame sets title - setSize(520, 550); // "this" JFrame sets initial size - - // Retrieve the content-pane of the top-level container JFrame - // All operations done on the content-pane - Container cp = getContentPane(); - cp.setLayout(new GridLayout(0, 1, 1, 1)); - - JPanel SMK_MCK_link_panel = new JPanel(new GridLayout(0, 1, 1, 1)); - cp.add(SMK_MCK_link_panel); - { - JPanel SMK_MCK_header_panel = new JPanel(); - SMK_MCK_link_panel.add(SMK_MCK_header_panel); - JLabel SMK_MCK_header = new JLabel("Make a new SMK-MCK link:"); - SMK_MCK_header_panel.add(SMK_MCK_header); - - JPanel SMK_MCK_link_subpanel_1 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); - SMK_MCK_link_panel.add(SMK_MCK_link_subpanel_1); - SMK_MCK_link_subpanel_1.add(new JLabel("SMK ID:")); - final JTextField SMK_ID_field = new JTextField(5); - SMK_MCK_link_subpanel_1.add(SMK_ID_field); - SMK_ID = 0; - SMK_MCK_link_subpanel_1.add(new JLabel("MCK ID:")); - final JTextField MCK_ID_field = new JTextField(5); - SMK_MCK_link_subpanel_1.add(MCK_ID_field); - MCK_ID = 0; - - SMK_MCK_link_subpanel_1.add(new JLabel("Comment:")); - final JTextArea linkComment = new JTextArea(""); - linkComment.setEditable(true); - linkComment.setLineWrap(true); - linkComment.setWrapStyleWord(true); - JScrollPane scrollPane = new JScrollPane(linkComment); - scrollPane.setPreferredSize(new Dimension(200, 30)); - SMK_MCK_link_subpanel_1.add(scrollPane); - JButton SMK_MCK_link_button = new JButton("Make SMK-MCK link"); 
- - JPanel SMK_MCK_link_subpanel_2 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); - SMK_MCK_link_panel.add(SMK_MCK_link_subpanel_2); - SMK_MCK_link_subpanel_2.add(SMK_MCK_link_button); - final JCheckBox force_link_checkbox = new JCheckBox("Force link upload (only tick if certain you want to do this)"); - SMK_MCK_link_subpanel_2.add(force_link_checkbox); - - SMK_MCK_link_button.addActionListener(new ActionListener() { - public void actionPerformed(ActionEvent e) { - Boolean GoodValsFlag = true; //we have good MCK and SMK values unless they are found to be bad - - try { SMK_ID = Integer.parseInt(SMK_ID_field.getText().trim()); } - catch (NumberFormatException nfe) { GUI_messenger.append("\nBad SMK ID, must be an integer. "); GoodValsFlag = false; } - try { MCK_ID = Integer.parseInt(MCK_ID_field.getText().trim()); } - catch (NumberFormatException nfe) { GUI_messenger.append("\nBad MCK ID, must be an integer. "); GoodValsFlag = false; } - MCK_SMK_link_comment = linkComment.getText(); - - if ( !MCK_SMK_link_comment.contains("\"\"\"") && !MCK_SMK_link_comment.contains("'''") && !MCK_SMK_link_comment.contains("'") && !MCK_SMK_link_comment.matches("(.*)\"(.*)") && GoodValsFlag == true ) { - GUI_messenger.append("\nSMK " + SMK_ID + " and MCK " + MCK_ID +" are both integers, as required."); - - if ( force_link_checkbox.isSelected() ) { - - Object[] options = {"Yes", "Cancel"}; - int dialogResult = JOptionPane.showOptionDialog(null, "Are you sure you want to overide any existing links to: \nSMK ID:"+SMK_ID+" and create a link to MCK ID: "+MCK_ID+"? \nPlease be sure you want to do this. 
If you are unsure, click 'Cancel'.", - "Confirm SMK-MCK link override", - JOptionPane.YES_NO_OPTION, - JOptionPane.PLAIN_MESSAGE, - null, options, options[1]); - if (dialogResult == JOptionPane.YES_OPTION){ - try { - - String link_command = "from MenuAwareMonitoringStandalone import MenuAwareMonitoringStandalone;mam = MenuAwareMonitoringStandalone();mam.force_deactivate_all_links_for_smk("+Integer.toString(SMK_ID)+",True);"; - ProcessBuilder submit_link = new ProcessBuilder("python", "-c", link_command); - - Process submit_link_proc = submit_link.start(); - - InputStream link_stdout = submit_link_proc.getInputStream(); - InputStream link_stderr = submit_link_proc.getErrorStream(); - Thread link_threadOut = new Thread( new MyInputStreamSink( link_stdout, "out", GUI_messenger )); - Thread link_threadErr = new Thread( new MyInputStreamSink( link_stderr, "err", null )); - link_threadOut.setDaemon(true); - link_threadErr.setDaemon(true); - link_threadOut.setName( String.format("link_stdout reader" )); - link_threadErr.setName( String.format("link_stderr reader" )); - link_threadOut.start(); - link_threadErr.start(); - - } catch (IOException err) { - } - } - else if (dialogResult == JOptionPane.NO_OPTION){ - GUI_messenger.append("\nForce upload cancelled."); - force_link_checkbox.setSelected(false); - } - - } - Object[] options = {"Yes", "Cancel"}; - int dialogResult = JOptionPane.showOptionDialog(null, "Are you sure you want to link: \nSMK ID:"+SMK_ID+" with MCK ID: "+MCK_ID+"\nwith comment: "+MCK_SMK_link_comment, - "Confirm SMK-MCK link", - JOptionPane.YES_NO_OPTION, - JOptionPane.PLAIN_MESSAGE, - null, options, options[1]); - if (dialogResult == JOptionPane.YES_OPTION){ - try { - - String link_command = "from MenuAwareMonitoringStandalone import MenuAwareMonitoringStandalone;mam = MenuAwareMonitoringStandalone();mam.link_smk_to_mck("+Integer.toString(SMK_ID)+","+Integer.toString(MCK_ID)+",'"+MCK_SMK_link_comment+"');"; - ProcessBuilder submit_link = new 
ProcessBuilder("python", "-c", link_command); - - Process submit_link_proc = submit_link.start(); - - InputStream link_stdout = submit_link_proc.getInputStream(); - InputStream link_stderr = submit_link_proc.getErrorStream(); - Thread link_threadOut = new Thread( new MyInputStreamSink( link_stdout, "out", GUI_messenger )); - Thread link_threadErr = new Thread( new MyInputStreamSink( link_stderr, "err", null )); - link_threadOut.setDaemon(true); - link_threadErr.setDaemon(true); - link_threadOut.setName( String.format("link_stdout reader" )); - link_threadErr.setName( String.format("link_stderr reader" )); - link_threadOut.start(); - link_threadErr.start(); - - } catch (IOException err) { - } - } - else if (dialogResult == JOptionPane.NO_OPTION){ - GUI_messenger.append("\nLink upload cancelled."); - force_link_checkbox.setSelected(false); - } - } else if ( MCK_SMK_link_comment.contains("\"") || MCK_SMK_link_comment.contains("\"\"\"") || MCK_SMK_link_comment.contains("'''") || MCK_SMK_link_comment.contains("'") || MCK_SMK_link_comment.matches("(.*)\"(.*)") ) { - GUI_messenger.append("\nComment contains \' or \", please remove these characters."); - force_link_checkbox.setSelected(false); - } } - }); + + private int MCK_ID = 0; + private int SMK_ID= 0; + private String MCK_SMK_link_comment = ""; + private String Make_MCK_comment = ""; + private JTextArea GUI_messenger; + private String jsonFilename = ""; + private File jsonSelectedFile; + private String SMCK_list = ""; + private int clone_MCK_ID = 0; + private String clone_release = ""; + private String clone_version = ""; + + static private JComboBox<Integer> SMKbox = new JComboBox<Integer>(); + static private JComboBox<Integer> MCKbox = new JComboBox<Integer>(); + static private JComboBox<Integer> CloneMCKbox = new JComboBox<Integer>(); + static private JComboBox<Integer> ViewSMKbox = new JComboBox<Integer>(); + static private JComboBox<Integer> ViewMCKbox = new JComboBox<Integer>(); + static private 
JComboBox<Integer> ViewSMCKbox = new JComboBox<Integer>(); + static private String dbalias; + static public Boolean print_to_terminal = false; + + public TrigMaMGUI() { + + //Set up + setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); // Exit program if close-window button clicked + setTitle("Trigger Menu-aware Monitoring (MaM)"); + setSize(600, 600); + setLocationByPlatform(true); + setVisible(true); + + // Retrieve the content-pane of the top-level container JFrame + // All operations done on the content-pane + Container cp = getContentPane(); + //cp.setLayout(new GridLayout(0, 1, 1, 1)); + cp.setLayout(new GridBagLayout()); + GridBagConstraints c = new GridBagConstraints(); + c.fill = GridBagConstraints.BOTH; + c.weightx = 1; + + // Link an MCK to an SMK + JPanel SMK_MCK_link_panel = new JPanel(new GridLayout(0, 1, 1, 1)); + { + JPanel SMK_MCK_header_panel = new JPanel(); + SMK_MCK_link_panel.add(SMK_MCK_header_panel); + JLabel SMK_MCK_header = new JLabel("Make a new SMK-MCK link"); + SMK_MCK_header_panel.add(SMK_MCK_header); + + JPanel SMK_MCK_link_subpanel_1 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); + SMK_MCK_link_panel.add(SMK_MCK_link_subpanel_1); + SMK_MCK_link_subpanel_1.add(new JLabel("SMK ID:")); + RefreshComboBox( SMKbox, "smk" ); + SMK_MCK_link_subpanel_1.add(SMKbox); + SMK_MCK_link_subpanel_1.add(new JLabel("MCK ID:")); + RefreshComboBox( MCKbox, "mck" ); + SMK_MCK_link_subpanel_1.add(MCKbox); + SMK_MCK_link_subpanel_1.add(new JLabel("Comment:")); + final JTextArea linkComment = new JTextArea(""); + linkComment.setEditable(true); + linkComment.setLineWrap(true); + linkComment.setWrapStyleWord(true); + JScrollPane scrollPane = new JScrollPane(linkComment); + scrollPane.setPreferredSize(new Dimension(200, 30)); + SMK_MCK_link_subpanel_1.add(scrollPane); + JButton SMK_MCK_link_button = new JButton("Make SMK-MCK link"); + + JPanel SMK_MCK_link_subpanel_2 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); + 
SMK_MCK_link_panel.add(SMK_MCK_link_subpanel_2); + SMK_MCK_link_subpanel_2.add(SMK_MCK_link_button); + final JCheckBox force_link_checkbox = new JCheckBox("Force link upload (only tick if you are certain you want to do this)"); + SMK_MCK_link_subpanel_2.add(force_link_checkbox); + + SMK_MCK_link_button.addActionListener(new ActionListener() { + public void actionPerformed(ActionEvent e) { + + SMK_ID = (Integer)SMKbox.getSelectedItem(); + MCK_ID = (Integer)MCKbox.getSelectedItem(); + MCK_SMK_link_comment = linkComment.getText(); + + if ( !MCK_SMK_link_comment.contains("\"\"\"") && !MCK_SMK_link_comment.contains("'''") && !MCK_SMK_link_comment.contains("'") && !MCK_SMK_link_comment.matches("(.*)\"(.*)") ) { + + if ( force_link_checkbox.isSelected() ) { + Object[] options = {"Yes", "Cancel"}; + int dialogResult = JOptionPane.showOptionDialog(null, "Are you sure you want to overide any existing links to: \nSMK ID: "+SMK_ID+" and create a link to MCK ID: "+MCK_ID+"? \nPlease be sure you want to do this. 
If you are unsure, click 'Cancel'.", + "Confirm SMK-MCK link override", + JOptionPane.YES_NO_OPTION, + JOptionPane.PLAIN_MESSAGE, + null, options, options[1]); + + if (dialogResult == JOptionPane.YES_OPTION) { + + String link_command = "from TrigHLTMonitoring.MenuAwareMonitoringStandalone import MenuAwareMonitoringStandalone;ms = MenuAwareMonitoringStandalone('"+dbalias+"');ms.force_deactivate_all_links_for_smk("+Integer.toString(SMK_ID)+",True);"; + + RunProcess(link_command); + + } else if (dialogResult == JOptionPane.NO_OPTION) { + GUI_messenger.append("\nForce upload cancelled."); + force_link_checkbox.setSelected(false); + } + + } + + Object[] options = {"Yes", "Cancel"}; + int dialogResult = JOptionPane.showOptionDialog(null, "Are you sure you want to link SMK ID: "+SMK_ID+" with MCK ID: "+MCK_ID+"\nwith comment: "+MCK_SMK_link_comment+"?", + "Confirm SMK-MCK link", + JOptionPane.YES_NO_OPTION, + JOptionPane.PLAIN_MESSAGE, + null, options, options[1]); + if (dialogResult == JOptionPane.YES_OPTION) { + + String link_command = "from TrigHLTMonitoring.MenuAwareMonitoringStandalone import MenuAwareMonitoringStandalone;ms = MenuAwareMonitoringStandalone('"+dbalias+"');ms.link_smk_to_mck("+Integer.toString(SMK_ID)+","+Integer.toString(MCK_ID)+",'"+MCK_SMK_link_comment+" ');"; + + RunProcess(link_command); + + } else if (dialogResult == JOptionPane.NO_OPTION) { + GUI_messenger.append("\nLink upload cancelled."); + force_link_checkbox.setSelected(false); + } + } else { + GUI_messenger.append("\nComment contains \' or \", please remove these characters."); + force_link_checkbox.setSelected(false); + } } - - JPanel Make_MCK_panel = new JPanel(new GridLayout(0, 1, 1, 1)); - cp.add(Make_MCK_panel); - { - JPanel Make_MCK_header_panel = new JPanel(); - Make_MCK_panel.add(Make_MCK_header_panel); - JLabel Make_MCK_header = new JLabel("Make a new MCK:"); - Make_MCK_header_panel.add(Make_MCK_header); - - JPanel Make_MCK_subpanel_1 = new JPanel(new 
FlowLayout(FlowLayout.CENTER,1,1)); - Make_MCK_panel.add(Make_MCK_subpanel_1); - Make_MCK_subpanel_1.add(new JLabel("Comma seperated SMCK list:")); - final JTextArea MCKlistField = new JTextArea(""); - MCKlistField.setEditable(true); - MCKlistField.setLineWrap(true); - MCKlistField.setWrapStyleWord(true); - JScrollPane MCKlistScrollPane = new JScrollPane(MCKlistField); - MCKlistScrollPane.setPreferredSize(new Dimension(290, 30)); - Make_MCK_subpanel_1.add(MCKlistScrollPane); - SMCK_list = ""; - - JPanel Make_MCK_subpanel_2 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); - Make_MCK_panel.add(Make_MCK_subpanel_2); - JButton Make_MCK_button = new JButton("Make new MCK"); - Make_MCK_subpanel_2.add(Make_MCK_button); - - Make_MCK_button.addActionListener(new ActionListener() { //Make MCK button pressed - public void actionPerformed(ActionEvent e) { - - SMCK_list = MCKlistField.getText().trim(); - if ( SMCK_list.isEmpty() == true ) { - GUI_messenger.append("\nPlease add a comma seperated list of SMCKs."); - } else { - Object[] options = {"Yes", - "Cancel"}; - int dialogResult = JOptionPane.showOptionDialog(null, "Are you sure you want to join the listed SMCKs?", - "Confirm", - JOptionPane.YES_NO_OPTION, - JOptionPane.PLAIN_MESSAGE, - null, options, options[1]); - if (dialogResult == JOptionPane.YES_OPTION){ - try { - - String Make_MCK_command = "from MenuAwareMonitoringStandalone import MenuAwareMonitoringStandalone;mam = MenuAwareMonitoringStandalone();mam.upload_mck(["+SMCK_list+"]);"; - ProcessBuilder make_mck = new ProcessBuilder("python", "-c", Make_MCK_command); - - Process make_mck_proc = make_mck.start(); - - InputStream make_mck_stdout = make_mck_proc.getInputStream(); - InputStream make_mck_stderr = make_mck_proc.getErrorStream(); - Thread make_mck_threadOut = new Thread( new MyInputStreamSink( make_mck_stdout, "out", GUI_messenger )); - Thread make_mck_threadErr = new Thread( new MyInputStreamSink( make_mck_stderr, "err", null )); - 
make_mck_threadOut.setDaemon(true); - make_mck_threadErr.setDaemon(true); - make_mck_threadOut.setName( String.format("make_mck_stdout reader" )); - make_mck_threadErr.setName( String.format("make_mck_stderr reader" )); - make_mck_threadOut.start(); - make_mck_threadErr.start(); - - } catch (IOException err) { - } - } - } - } - - }); - + }); + } + + //Make an MCK from some existing SMCK + JPanel Make_MCK_panel = new JPanel(new GridLayout(0, 1, 1, 1)); + { + JPanel Make_MCK_header_panel = new JPanel(); + Make_MCK_panel.add(Make_MCK_header_panel); + JLabel Make_MCK_header = new JLabel("Make an MCK from a list of SMCK"); + Make_MCK_header_panel.add(Make_MCK_header); + + JPanel Make_MCK_subpanel_1 = new JPanel(new BorderLayout()); + Make_MCK_panel.add(Make_MCK_subpanel_1); + JPanel Make_MCK_subpanel_1_1 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); + Make_MCK_subpanel_1_1.add(new JLabel("Comma seperated SMCK list:")); + final JTextArea MCKlistField = new JTextArea(""); + MCKlistField.setEditable(true); + MCKlistField.setLineWrap(true); + MCKlistField.setWrapStyleWord(true); + JScrollPane MCKlistScrollPane = new JScrollPane(MCKlistField); + MCKlistScrollPane.setPreferredSize(new Dimension(290, 30)); + Make_MCK_subpanel_1_1.add(MCKlistScrollPane); + JPanel Make_MCK_subpanel_1_2 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); + Make_MCK_subpanel_1_2.add(new JLabel("Comment:")); + final JTextArea MakeMCKcommentField = new JTextArea(""); + MakeMCKcommentField.setEditable(true); + MakeMCKcommentField.setLineWrap(true); + MakeMCKcommentField.setWrapStyleWord(true); + JScrollPane Make_MCK_scrollPane = new JScrollPane(MakeMCKcommentField); + Make_MCK_scrollPane.setPreferredSize(new Dimension(290, 30)); + + Make_MCK_subpanel_1_2.add(Make_MCK_scrollPane); + Make_MCK_subpanel_1.add(Make_MCK_subpanel_1_1, BorderLayout.NORTH); + Make_MCK_subpanel_1.add(Make_MCK_subpanel_1_2, BorderLayout.SOUTH); + + JPanel Make_MCK_subpanel_2 = new JPanel(new 
FlowLayout(FlowLayout.CENTER,1,1)); + Make_MCK_panel.add(Make_MCK_subpanel_2); + JButton Make_MCK_button = new JButton("Make new MCK"); + Make_MCK_subpanel_2.add(Make_MCK_button); + + Make_MCK_button.addActionListener(new ActionListener() { //Make MCK button pressed + public void actionPerformed(ActionEvent e) { + + SMCK_list = MCKlistField.getText().trim(); + if ( SMCK_list.isEmpty() == true ) { + GUI_messenger.append("\nPlease add a comma seperated list of SMCKs."); + } else { + Make_MCK_comment = MakeMCKcommentField.getText(); + if ( !Make_MCK_comment.contains("\"\"\"") && !Make_MCK_comment.contains("'''") && !Make_MCK_comment.contains("'") && !Make_MCK_comment.matches("(.*)\"(.*)") ) { + Object[] options = {"Yes", "Cancel"}; + int dialogResult = JOptionPane.showOptionDialog(null, "Are you sure you want to join the listed SMCKs:\n"+SMCK_list+"\nwith comment: "+Make_MCK_comment, + "Confirm", + JOptionPane.YES_NO_OPTION, + JOptionPane.PLAIN_MESSAGE, + null, options, options[1]); + if (dialogResult == JOptionPane.YES_OPTION) { + + String Make_MCK_command = "from TrigHLTMonitoring.MenuAwareMonitoringStandalone import MenuAwareMonitoringStandalone;ms = MenuAwareMonitoringStandalone('"+dbalias+"');ms.upload_mck(["+SMCK_list+"],'"+Make_MCK_comment+" ');"; + + RunProcess(Make_MCK_command); + + } + } + } } - - JPanel Clone_MCK_panel = new JPanel(new GridLayout(0, 1, 1, 1)); - cp.add(Clone_MCK_panel); - { - JPanel Clone_MCK_header_panel = new JPanel(); - Clone_MCK_panel.add(Clone_MCK_header_panel); - JLabel Clone_MCK_header = new JLabel("Clone an MCK for use in a different release:"); - Clone_MCK_header_panel.add(Clone_MCK_header); - - JPanel Clone_MCK_subpanel_1 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); - Clone_MCK_panel.add(Clone_MCK_subpanel_1); - JPanel Clone_MCK_subpanel_1 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); - Clone_MCK_panel.add(Clone_MCK_subpanel_1); - Clone_MCK_subpanel_1.add(new JLabel("MCK ID:")); - final JTextField MCK_ID_field = new 
JTextField(5); - Clone_MCK_subpanel_1.add(MCK_ID_field); - clone_MCK_ID = 0; - Clone_MCK_subpanel_1.add(new JLabel("ATLAS release:")); - final JTextField release_field = new JTextField(8); - Clone_MCK_subpanel_1.add(release_field); - clone_release = ""; - Clone_MCK_subpanel_1.add(new JLabel("ATLAS version:")); - final JTextField version_field = new JTextField(8); - Clone_MCK_subpanel_1.add(version_field); - clone_version = ""; - - JPanel Clone_MCK_subpanel_2 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); - Clone_MCK_panel.add(Clone_MCK_subpanel_2); - JButton Clone_MCK_button = new JButton("Clone MCK"); - Clone_MCK_subpanel_2.add(Clone_MCK_button); - - Clone_MCK_button.addActionListener(new ActionListener() { //Clone MCK button pressed - public void actionPerformed(ActionEvent e) { - - Boolean GoodValsFlag = true; //we have good MCK and SMK values unless they are found to be bad - try { clone_MCK_ID = Integer.parseInt(MCK_ID_field.getText().trim()); } - catch (NumberFormatException nfe) { GUI_messenger.append("\nBad MCK ID, must be an integer. 
"); GoodValsFlag = false; } - - clone_release = release_field.getText().trim(); - clone_version = version_field.getText().trim(); - if ( clone_release.isEmpty() == true || clone_version.isEmpty() ) { - GUI_messenger.append("\nPlease add the release and version that you want the new MCK to be valid in."); - } else { - if ( GoodValsFlag == true ) { - Object[] options = {"Yes", - "Cancel"}; - int dialogResult = JOptionPane.showOptionDialog(null, "Are you sure you want to clone the entered MCK?", - "Confirm", - JOptionPane.YES_NO_OPTION, - JOptionPane.PLAIN_MESSAGE, - null, options, options[1]); - if (dialogResult == JOptionPane.YES_OPTION){ - try { - - String Clone_MCK_command = "from MenuAwareMonitoringStandalone import MenuAwareMonitoringStandalone;mam = MenuAwareMonitoringStandalone();mam.clone_mck_for_new_release("+clone_MCK_ID+",'"+clone_release+"','"+clone_version+"');"; - ProcessBuilder clone_mck = new ProcessBuilder("python", "-c", Clone_MCK_command); - - Process clone_mck_proc = clone_mck.start(); - - InputStream clone_mck_stdout = clone_mck_proc.getInputStream(); - InputStream clone_mck_stderr = clone_mck_proc.getErrorStream(); - Thread clone_mck_threadOut = new Thread( new MyInputStreamSink( clone_mck_stdout, "out", GUI_messenger )); - Thread clone_mck_threadErr = new Thread( new MyInputStreamSink( clone_mck_stderr, "err", null )); - clone_mck_threadOut.setDaemon(true); - clone_mck_threadErr.setDaemon(true); - clone_mck_threadOut.setName( String.format("clone_mck_stdout reader" )); - clone_mck_threadErr.setName( String.format("clone_mck_stderr reader" )); - clone_mck_threadOut.start(); - clone_mck_threadErr.start(); - - } catch (IOException err) { - } - } - } - } - } - }); - + }); + } + + //Clone and MCK and assign a new release + JPanel Clone_MCK_panel = new JPanel(new GridLayout(0, 1, 1, 1)); + { + JPanel Clone_MCK_header_panel = new JPanel(); + Clone_MCK_panel.add(Clone_MCK_header_panel); + JLabel Clone_MCK_header = new JLabel("Clone an existing MCK to 
use it in a different release"); + Clone_MCK_header_panel.add(Clone_MCK_header); + + JPanel Clone_MCK_subpanel_1 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); + Clone_MCK_panel.add(Clone_MCK_subpanel_1); + Clone_MCK_subpanel_1.add(new JLabel("MCK ID:")); + RefreshComboBox( CloneMCKbox, "mck" ); + Clone_MCK_subpanel_1.add(CloneMCKbox); + Clone_MCK_subpanel_1.add(new JLabel("ATLAS project:")); + final JTextField release_field = new JTextField(8); + Clone_MCK_subpanel_1.add(release_field); + Clone_MCK_subpanel_1.add(new JLabel("ATLAS version:")); + final JTextField version_field = new JTextField(8); + Clone_MCK_subpanel_1.add(version_field); + + JPanel Clone_MCK_subpanel_2 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); + Clone_MCK_panel.add(Clone_MCK_subpanel_2); + JButton Clone_MCK_button = new JButton("Clone MCK"); + Clone_MCK_subpanel_2.add(Clone_MCK_button); + + Clone_MCK_button.addActionListener(new ActionListener() { //Clone MCK button pressed + public void actionPerformed(ActionEvent e) { + + clone_MCK_ID = (Integer)CloneMCKbox.getSelectedItem(); + clone_release = release_field.getText().trim(); + clone_version = version_field.getText().trim(); + + if ( clone_release.isEmpty() == true || clone_version.isEmpty() ) { + GUI_messenger.append("\nPlease add the project and version that you want the new MCK to be valid in."); + } else { + Object[] options = {"Yes", "Cancel"}; + int dialogResult = JOptionPane.showOptionDialog(null, "Are you sure you want to clone MCK: "+clone_MCK_ID+"\nand assign the clone to be valid in: "+clone_release+"-"+clone_version+"?", + "Confirm", + JOptionPane.YES_NO_OPTION, + JOptionPane.PLAIN_MESSAGE, + null, options, options[1]); + if (dialogResult == JOptionPane.YES_OPTION) { + + String Clone_MCK_command = "from TrigHLTMonitoring.MenuAwareMonitoringStandalone import MenuAwareMonitoringStandalone;ms = 
MenuAwareMonitoringStandalone('"+dbalias+"');ms.clone_mck_for_new_release("+clone_MCK_ID+",'"+clone_release+"','"+clone_version+"');"; + + RunProcess(Clone_MCK_command); + + } else if (dialogResult == JOptionPane.NO_OPTION) { + GUI_messenger.append("\nCloning cancelled."); + } + } } - - JPanel Upload_json_panel = new JPanel(new GridLayout(0, 1, 1, 1)); - cp.add(Upload_json_panel); - { - JPanel json_header_panel = new JPanel(); - Upload_json_panel.add(json_header_panel); - JLabel json_header = new JLabel("Upload a .json file to the database:"); - json_header_panel.add(json_header); - - JPanel Upload_json_subpanel_1 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); - Upload_json_panel.add(Upload_json_subpanel_1); - Upload_json_subpanel_1.add(new JLabel(".json file to upload:")); - final JFileChooser jsonChooser = new JFileChooser(); - final JTextField jsonFilenameField = new JTextField(32); - jsonFilenameField.setEditable(false); - Upload_json_subpanel_1.add(jsonFilenameField); - - JPanel Upload_json_subpanel_2 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); - Upload_json_panel.add(Upload_json_subpanel_2); - JButton jsonChooserButton = new JButton("Choose file"); - Upload_json_subpanel_2.add(jsonChooserButton); - final JButton submitFile = new JButton("Submit file"); - submitFile.setEnabled(false); //Cannot submit file until a file has been picked - Upload_json_subpanel_2.add(submitFile); - - jsonChooserButton.addActionListener(new ActionListener() { //Choose file button pressed - public void actionPerformed(ActionEvent e) { - int chooserResult = jsonChooser.showOpenDialog((Component)e.getSource()); //Open file chooser - if (chooserResult == JFileChooser.APPROVE_OPTION) { - jsonSelectedFile = jsonChooser.getSelectedFile(); - jsonFilename = jsonSelectedFile.getAbsolutePath(); - jsonFilenameField.setText(jsonFilename+"\n"); - submitFile.setEnabled(true); - } - } - }); - - submitFile.addActionListener(new ActionListener() { //Submit file button pressed - public 
void actionPerformed(ActionEvent e) { - - if (jsonSelectedFile.exists() && !jsonSelectedFile.isDirectory() /*&& release != null*/ ) { - String extension = jsonFilename.substring(jsonFilename.lastIndexOf("."),jsonFilename.length()); - String file_with_extension = jsonFilename.substring(jsonFilename.lastIndexOf("/")+1,jsonFilename.length()); - if (".json".equalsIgnoreCase(extension)) { - GUI_messenger.append("\nGood file extension "+extension+"."); - - Object[] options = {"Yes","Cancel"}; - - int dialogResult = JOptionPane.showOptionDialog(null, "Are you sure you want to upload "+file_with_extension+" to the database?", - - "Confirm file upload", - JOptionPane.YES_NO_OPTION, - JOptionPane.PLAIN_MESSAGE, - null, options, options[1]); - if (dialogResult == JOptionPane.YES_OPTION){ - try { - - String json_command = "from MenuAwareMonitoringStandalone import MenuAwareMonitoringStandalone;mam = MenuAwareMonitoringStandalone();mam.upload_config_from_json('"+jsonFilename+"');"; - ProcessBuilder submit_json = new ProcessBuilder("python", "-c", json_command); - - Process submit_json_proc = submit_json.start(); - - InputStream json_stdout = submit_json_proc.getInputStream(); - InputStream json_stderr = submit_json_proc.getErrorStream(); - Thread json_threadOut = new Thread( new MyInputStreamSink( json_stdout, "out", GUI_messenger )); - Thread json_threadErr = new Thread( new MyInputStreamSink( json_stderr, "err", null )); - json_threadOut.setDaemon(true); - json_threadErr.setDaemon(true); - json_threadOut.setName( String.format("json_stdout reader" )); - json_threadErr.setName( String.format("json_stderr reader" )); - json_threadOut.start(); - json_threadErr.start(); - - } catch (IOException err) { - } - } - else if (dialogResult == JOptionPane.NO_OPTION){ - GUI_messenger.append("\n"+file_with_extension+" upload cancelled."); - } - } - else GUI_messenger.append("\nWrong file extension ("+extension+"): require .json."); - } - } - }); + }); + } + + //Upload SMCK and MCK to the 
DB from a json file + JPanel Upload_json_panel = new JPanel(new GridLayout(0, 1, 1, 1)); + { + JPanel json_header_panel = new JPanel(); + Upload_json_panel.add(json_header_panel); + JLabel json_header = new JLabel("Upload a .json file to the database to create SMCK and MCK"); + json_header_panel.add(json_header); + + JPanel Upload_json_subpanel_1 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); + Upload_json_panel.add(Upload_json_subpanel_1); + Upload_json_subpanel_1.add(new JLabel(".json file to upload:")); + final JFileChooser jsonChooser = new JFileChooser(); + final JTextField jsonFilenameField = new JTextField(32); + jsonFilenameField.setEditable(false); + Upload_json_subpanel_1.add(jsonFilenameField); + + JPanel Upload_json_subpanel_2 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); + Upload_json_panel.add(Upload_json_subpanel_2); + JButton jsonChooserButton = new JButton("Choose file"); + Upload_json_subpanel_2.add(jsonChooserButton); + final JButton submitFile = new JButton("Submit file"); + submitFile.setEnabled(false); //Cannot submit file until a file has been picked + Upload_json_subpanel_2.add(submitFile); + + jsonChooserButton.addActionListener(new ActionListener() { //Choose file button pressed + public void actionPerformed(ActionEvent e) { + int chooserResult = jsonChooser.showOpenDialog((Component)e.getSource()); //Open file chooser + if (chooserResult == JFileChooser.APPROVE_OPTION) { + jsonSelectedFile = jsonChooser.getSelectedFile(); + jsonFilename = jsonSelectedFile.getAbsolutePath(); + jsonFilenameField.setText(jsonFilename+"\n"); + submitFile.setEnabled(true); + } } - - JPanel Messenger_panel = new JPanel(new GridLayout(0, 1, 1, 1)); - cp.add(Messenger_panel); - { - GUI_messenger = new JTextArea(""); - GUI_messenger.setEditable(false); - GUI_messenger.setLineWrap(true); - GUI_messenger.setWrapStyleWord(true); - JScrollPane messenger_scrollPane = new JScrollPane(GUI_messenger); - messenger_scrollPane.setPreferredSize(new Dimension(200, 
30)); - DefaultCaret GUI_caret = (DefaultCaret)GUI_messenger.getCaret(); - GUI_caret.setUpdatePolicy(DefaultCaret.ALWAYS_UPDATE); - Messenger_panel.add(messenger_scrollPane); - + }); + + submitFile.addActionListener(new ActionListener() { //Submit file button pressed + public void actionPerformed(ActionEvent e) { + + if (jsonSelectedFile.exists() && !jsonSelectedFile.isDirectory()) { + String extension = jsonFilename.substring(jsonFilename.lastIndexOf("."),jsonFilename.length()); + String file_with_extension = jsonFilename.substring(jsonFilename.lastIndexOf("/")+1,jsonFilename.length()); + if (".json".equalsIgnoreCase(extension)) { + GUI_messenger.append("\nGood file extension "+extension+"."); + Object[] options = {"Yes", "Cancel"}; + + int dialogResult = JOptionPane.showOptionDialog(null, "Are you sure you want to upload "+file_with_extension+" to the database?", + "Confirm file upload", + JOptionPane.YES_NO_OPTION, + JOptionPane.PLAIN_MESSAGE, + null, options, options[1]); + if (dialogResult == JOptionPane.YES_OPTION) { + + String json_command = "from TrigHLTMonitoring.MenuAwareMonitoringStandalone import MenuAwareMonitoringStandalone;ms = MenuAwareMonitoringStandalone('"+dbalias+"');ms.upload_config_from_json('"+jsonFilename+"');"; + + RunProcess(json_command); + + } else if (dialogResult == JOptionPane.NO_OPTION) { + GUI_messenger.append("\n"+file_with_extension+" upload cancelled."); + } + } + else GUI_messenger.append("\nWrong file extension ("+extension+"): require .json."); + } + } + }); + } + + //View SMK, MCK and SMCK + JPanel View_keys_panel = new JPanel(new GridLayout(0, 1, 1, 1)); + { + JPanel View_keys_header_panel = new JPanel(); + View_keys_panel.add(View_keys_header_panel); + JLabel View_keys_header = new JLabel("View contents of keys"); + View_keys_header_panel.add(View_keys_header); + + JPanel View_keys_subpanel_1 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); + View_keys_panel.add(View_keys_subpanel_1); + View_keys_subpanel_1.add(new 
JLabel("SMK ID:")); + RefreshComboBox( ViewSMKbox, "smk" ); + View_keys_subpanel_1.add(ViewSMKbox); + View_keys_subpanel_1.add(new JLabel("MCK ID:")); + RefreshComboBox( ViewMCKbox, "mck" ); + View_keys_subpanel_1.add(ViewMCKbox); + View_keys_subpanel_1.add(new JLabel("SMCK ID:")); + RefreshComboBox( ViewSMCKbox, "smck" ); + View_keys_subpanel_1.add(ViewSMCKbox); + + JPanel View_keys_subpanel_2 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); + View_keys_panel.add(View_keys_subpanel_2); + JButton ViewSMKbutton = new JButton("View SMK"); + View_keys_subpanel_2.add(ViewSMKbutton); + JButton ViewMCKbutton = new JButton("View MCK"); + View_keys_subpanel_2.add(ViewMCKbutton); + JButton ViewSMCKbutton = new JButton("View SMCK"); + View_keys_subpanel_2.add(ViewSMCKbutton); + + ViewSMKbutton.addActionListener(new ActionListener() { + public void actionPerformed(ActionEvent e) { + //Get the selected SMK info + Integer view_SMK_ID = (Integer)ViewSMKbox.getSelectedItem(); + String viewSMKcommand = "from TrigHLTMonitoring.MenuAwareMonitoringStandalone import MenuAwareMonitoringStandalone;ms = MenuAwareMonitoringStandalone('"+dbalias+"');ms.search('SMK_ID','"+view_SMK_ID+"');"; + RunProcess(viewSMKcommand); + } + }); + + ViewMCKbutton.addActionListener(new ActionListener() { + public void actionPerformed(ActionEvent e) { + //Get the selected MCK info + Integer view_MCK_ID = (Integer)ViewMCKbox.getSelectedItem(); + String viewMCKcommand = "from TrigHLTMonitoring.MenuAwareMonitoringStandalone import MenuAwareMonitoringStandalone;ms = MenuAwareMonitoringStandalone('"+dbalias+"');ms.search('MCK_ID','"+view_MCK_ID+"');"; + RunProcess(viewMCKcommand); + } + }); + + ViewSMCKbutton.addActionListener(new ActionListener() { + public void actionPerformed(ActionEvent e) { + //Get the selected MCK info + Integer view_SMCK_ID = (Integer)ViewSMCKbox.getSelectedItem(); + String viewSMCKcommand = "from TrigHLTMonitoring.MenuAwareMonitoringStandalone import MenuAwareMonitoringStandalone;ms = 
MenuAwareMonitoringStandalone('"+dbalias+"');ms.search('SMCK_ID','"+view_SMCK_ID+"');"; + RunProcess(viewSMCKcommand); } - + }); } - - private static class MyInputStreamSink implements Runnable { - private InputStream m_in; - private String m_streamName; - private JTextArea m_outDest; - - MyInputStreamSink( InputStream in, String streamName, JTextArea outDest ) { - m_in = in; - m_streamName = streamName; - m_outDest = outDest; + + //Setting up the tabbed pane + JTabbedPane tabbedPane = new JTabbedPane(); + //Add the tabbed pane to the container. + c.weighty = 0.2; + c.gridy = 0; + c.anchor = GridBagConstraints.PAGE_START; + cp.add(tabbedPane,c); + //The following line enables to use scrolling tabs. + tabbedPane.setTabLayoutPolicy(JTabbedPane.SCROLL_TAB_LAYOUT); + + tabbedPane.addTab("MCK-SMK link", null, SMK_MCK_link_panel, "Link an MCK to an SMK"); + tabbedPane.setMnemonicAt(0, KeyEvent.VK_1); + + tabbedPane.addTab("Make MCK", null, Make_MCK_panel, "Make an MCK from a list of SMCK"); + tabbedPane.setMnemonicAt(1, KeyEvent.VK_2); + + tabbedPane.addTab("Clone MCK", null, Clone_MCK_panel, "Clone an MCK and assign it to a different release"); + tabbedPane.setMnemonicAt(2, KeyEvent.VK_3); + + tabbedPane.addTab("Upload .json", null, Upload_json_panel, "Upload a .json file to the DB to create SMCKs (and an MCK linking all uploaded SMCK)"); + tabbedPane.setMnemonicAt(3, KeyEvent.VK_4); + + tabbedPane.addTab("View keys", null, View_keys_panel, "View the contents of SMK, MCK and SMCK"); + tabbedPane.setMnemonicAt(4, KeyEvent.VK_5); + + //Refresh the combobox lists whenever the tab is changed to a tab with a combobox + ChangeListener changeListener = new ChangeListener() { + public void stateChanged(ChangeEvent changeEvent) { + JTabbedPane sourceTabbedPane = (JTabbedPane) changeEvent.getSource(); + int index = sourceTabbedPane.getSelectedIndex(); + //Refresh the combobox lists when we change to relevant tabs + if (index == 0) { + //MCK-SMK link panel + RefreshComboBox( 
SMKbox, "smk" ); + RefreshComboBox( MCKbox, "mck" ); + } else if (index == 2) { + //Clone MCK panel + RefreshComboBox( CloneMCKbox, "mck" ); + } else if (index == 4) { + //View keys panel + RefreshComboBox( ViewSMKbox, "smk" ); + RefreshComboBox( ViewMCKbox, "mck" ); + RefreshComboBox( ViewSMCKbox, "smck" ); } - - @Override - public void run() { - BufferedReader reader = null; - Writer writer = null; - + + } + }; + tabbedPane.addChangeListener(changeListener); + + //Setting up the messenger pane + //Static panel under the tabs + JPanel Messenger_panel = new JPanel(new GridLayout(0, 1, 1, 1)); + c.weighty = 0.8; + c.gridy = 1; + c.anchor = GridBagConstraints.PAGE_END; + cp.add(Messenger_panel,c); + { + GUI_messenger = new JTextArea(""); + GUI_messenger.setEditable(false); + GUI_messenger.setLineWrap(true); + GUI_messenger.setWrapStyleWord(true); + JScrollPane messenger_scrollPane = new JScrollPane(GUI_messenger); + //messenger_scrollPane.setPreferredSize(new Dimension(200, 30)); + DefaultCaret GUI_caret = (DefaultCaret)GUI_messenger.getCaret(); + GUI_caret.setUpdatePolicy(DefaultCaret.ALWAYS_UPDATE); + Messenger_panel.add(messenger_scrollPane); + } + + } + + private void RunProcess(String command) { + try { + + GUI_messenger.append("\nReceived command:\n"+command+"\n"); + + ProcessBuilder procbuild = new ProcessBuilder("python", "-c", command); + + Process proc = procbuild.start(); + + InputStream stdout = proc.getInputStream(); + InputStream stderr = proc.getErrorStream(); + Thread threadOut = new Thread( new MyInputStreamSink( stdout, "out", GUI_messenger )); + Thread threadErr = new Thread( new MyInputStreamSink( stderr, "err", GUI_messenger )); + threadOut.setDaemon(true); + threadErr.setDaemon(true); + threadOut.setName( String.format("stdout reader" )); + threadErr.setName( String.format("stderr reader" )); + threadOut.start(); + threadErr.start(); + + GUI_messenger.append("\n"); + + } catch (IOException err) { + } + } + + private static ArrayList<Integer> 
GetList( String dbalias, String keytype ) { + + ArrayList<Integer> results = new ArrayList<Integer>(); + String command = "from TrigHLTMonitoring.MenuAwareMonitoringStandalone import MenuAwareMonitoringStandalone;ms = MenuAwareMonitoringStandalone('"+dbalias+"');ms.oi.get_"+keytype+"_list()"; + + try { + ProcessBuilder procbuild = new ProcessBuilder("python", "-c", command); + Process proc = procbuild.start(); + BufferedReader reader = new BufferedReader(new InputStreamReader(proc.getInputStream())); + String line = null; + int linetemp = 0; + while ( (line = reader.readLine()) != null ) { + try { + linetemp = Integer.parseInt(line.trim()); + results.add(linetemp); + //System.out.println(line); + } catch ( NumberFormatException nfe ) { + //System.out.println("Not an int:"); + //System.out.println(line); + } + } + } catch ( IOException e ) { + e.printStackTrace(); + return results; + } + Collections.reverse(results); + return results; + } + + public void RefreshComboBox( JComboBox<Integer> combobox, String keytype ) { + + //Create and update the list of either SMK or MCK + ArrayList<Integer> menulist = new ArrayList<Integer>(); + menulist = GetList( dbalias, keytype ); + //Set the comboboxmodel, updating the dropdown list + combobox.setModel(new DefaultComboBoxModel<Integer>(menulist.toArray(new Integer[menulist.size()]))); + } + + public static void main(String[] args) throws InterruptedException, IOException { + // Run the GUI construction in the Event-Dispatching thread for thread-safety + SwingUtilities.invokeLater(new Runnable() { + @Override + public void run() { + if (args.length == 0) { + // cannot start the GUI if the dbstring has not been provided + // should be "", "TRIGGERDB" or "TRIGGERDBREPR" + System.out.println("Cannot open the GUI without the DB alias to us."); + System.out.println("Please provide the DB alias as a command line arg."); + System.exit(1); + } else { try { - if ( m_outDest != null ) { - String starting_process = "Starting process:"; - 
m_outDest.append("\n\n"+starting_process); - } - - reader = new BufferedReader( new InputStreamReader( m_in ) ); - - for ( String line = null; ((line = reader.readLine()) != null); ) { - if ( print_to_terminal == true ) System.out.println(line); - if ( m_outDest != null ) { - if ( line.indexOf("Py:Athena") == -1 ) { - m_outDest.append("\n"); - m_outDest.append(line); - } - } - } - } catch (IOException e) { - System.out.println( "Unexpected I/O exception reading from process." ); - } /*catch (BadLocationException ble) { - }*/ - finally { - try { - if ( null != reader ) reader.close(); - } - catch ( java.io.IOException e ) { - System.out.println( "Unexpected I/O exception closing a stream." ); - } + + dbalias = args[0]; + if (args.length > 1) { + print_to_terminal = args[1].equalsIgnoreCase("true") || args[1].equalsIgnoreCase("t") || args[1].equalsIgnoreCase("yes") || args[1].equalsIgnoreCase("y") || args[1].equalsIgnoreCase("print") || args[1].equalsIgnoreCase("print_to_terminal") || args[1].equalsIgnoreCase("1"); + } + TrigMaMGUI frame = new TrigMaMGUI(); + + } catch (Exception e) { + e.printStackTrace(); } } + } + }); + } + + private static class MyInputStreamSink implements Runnable { + private InputStream m_in; + private String m_streamName; + private JTextArea m_outDest; + + MyInputStreamSink( InputStream in, String streamName, JTextArea outDest ) { + m_in = in; + m_streamName = streamName; + m_outDest = outDest; } - - - - public static void main(String[] args) throws InterruptedException, IOException { - - // Run the GUI construction in the Event-Dispatching thread for thread-safety - SwingUtilities.invokeLater(new Runnable() { - @Override - public void run() { - try { - TrigMaMGUI frame = new TrigMaMGUI(); - frame.setVisible(true); - } catch (Exception e) { - e.printStackTrace(); - } + + @Override + public void run() { + BufferedReader reader = null; + Writer writer = null; + + try { + if ( m_outDest != null ) { + //String starting_process = "Starting 
process:"; + //m_outDest.append("\n\n"+starting_process); + } + + reader = new BufferedReader( new InputStreamReader( m_in ) ); + + for ( String line = null; ((line = reader.readLine()) != null); ) { + if ( print_to_terminal == true ) System.out.println(line); + if ( m_outDest != null ) { + if ( line.indexOf("Py:Athena") == -1 ) { + m_outDest.append("\n"); + m_outDest.append(line); } - }); + } + } + } catch (IOException e) { + System.out.println( "Unexpected I/O exception reading from process." ); + } /*catch (BadLocationException ble) { + }*/ + finally { + try { + if ( null != reader ) reader.close(); + } catch ( java.io.IOException e ) { + System.out.println( "Unexpected I/O exception closing a stream." ); + } + } } + } } diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/java/GUI/TrigMaMGUI_TRIGGERDBREPR.java b/Trigger/TrigMonitoring/TrigHLTMonitoring/java/GUI/TrigMaMGUI_TRIGGERDBREPR.java deleted file mode 100644 index 1d702e0938f0..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/java/GUI/TrigMaMGUI_TRIGGERDBREPR.java +++ /dev/null @@ -1,490 +0,0 @@ -//// -//// GUI for TriggerHLTMonitoring MenuAwareMonitoring -//// built by Xanthe Hoad (xhoad@cern.ch) -//// - -import java.awt.*; -import java.awt.event.*; -import javax.swing.*; -import javax.swing.filechooser.*; -import javax.swing.JFileChooser; -import javax.swing.SwingUtilities; -import javax.swing.SwingWorker; -import javax.swing.text.*; -import java.io.File; -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.StringWriter; -import java.io.Writer; -import java.io.Reader; -import java.util.concurrent.ExecutionException; -import java.util.List; -import java.lang.ProcessBuilder; - -public class TrigMaMGUI_TRIGGERDBREPR extends JFrame { - - private int MCK_ID; - private int SMK_ID; - private int clone_MCK_ID; - private String MCK_SMK_link_comment; - private JTextArea GUI_messenger; - private String 
jsonFilename; - private File jsonSelectedFile; - private String SMCK_list; - private String clone_release; - private String clone_version; - - static public Boolean print_to_terminal; - - public TrigMaMGUI_TRIGGERDBREPR () { - - print_to_terminal = true; - - //Set up - setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); // Exit program if close-window button clicked - setTitle("Trigger MaM User Interface"); // "this" JFrame sets title - setSize(520, 550); // "this" JFrame sets initial size - - // Retrieve the content-pane of the top-level container JFrame - // All operations done on the content-pane - Container cp = getContentPane(); - cp.setLayout(new GridLayout(0, 1, 1, 1)); - - JPanel SMK_MCK_link_panel = new JPanel(new GridLayout(0, 1, 1, 1)); - cp.add(SMK_MCK_link_panel); - { - JPanel SMK_MCK_header_panel = new JPanel(); - SMK_MCK_link_panel.add(SMK_MCK_header_panel); - JLabel SMK_MCK_header = new JLabel("Make a new SMK-MCK link:"); - SMK_MCK_header_panel.add(SMK_MCK_header); - - JPanel SMK_MCK_link_subpanel_1 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); - SMK_MCK_link_panel.add(SMK_MCK_link_subpanel_1); - SMK_MCK_link_subpanel_1.add(new JLabel("SMK ID:")); - final JTextField SMK_ID_field = new JTextField(5); - SMK_MCK_link_subpanel_1.add(SMK_ID_field); - SMK_ID = 0; - SMK_MCK_link_subpanel_1.add(new JLabel("MCK ID:")); - final JTextField MCK_ID_field = new JTextField(5); - SMK_MCK_link_subpanel_1.add(MCK_ID_field); - MCK_ID = 0; - - SMK_MCK_link_subpanel_1.add(new JLabel("Comment:")); - final JTextArea linkComment = new JTextArea(""); - linkComment.setEditable(true); - linkComment.setLineWrap(true); - linkComment.setWrapStyleWord(true); - JScrollPane scrollPane = new JScrollPane(linkComment); - scrollPane.setPreferredSize(new Dimension(200, 30)); - SMK_MCK_link_subpanel_1.add(scrollPane); - JButton SMK_MCK_link_button = new JButton("Make SMK-MCK link"); - - JPanel SMK_MCK_link_subpanel_2 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); - 
SMK_MCK_link_panel.add(SMK_MCK_link_subpanel_2); - SMK_MCK_link_subpanel_2.add(SMK_MCK_link_button); - final JCheckBox force_link_checkbox = new JCheckBox("Force link upload (only tick if certain you want to do this)"); - SMK_MCK_link_subpanel_2.add(force_link_checkbox); - - SMK_MCK_link_button.addActionListener(new ActionListener() { - public void actionPerformed(ActionEvent e) { - Boolean GoodValsFlag = true; //we have good MCK and SMK values unless they are found to be bad - - try { SMK_ID = Integer.parseInt(SMK_ID_field.getText().trim()); } - catch (NumberFormatException nfe) { GUI_messenger.append("\nBad SMK ID, must be an integer. "); GoodValsFlag = false; } - try { MCK_ID = Integer.parseInt(MCK_ID_field.getText().trim()); } - catch (NumberFormatException nfe) { GUI_messenger.append("\nBad MCK ID, must be an integer. "); GoodValsFlag = false; } - MCK_SMK_link_comment = linkComment.getText(); - - if ( !MCK_SMK_link_comment.contains("\"\"\"") && !MCK_SMK_link_comment.contains("'''") && !MCK_SMK_link_comment.contains("'") && !MCK_SMK_link_comment.matches("(.*)\"(.*)") && GoodValsFlag == true ) { - GUI_messenger.append("\nSMK " + SMK_ID + " and MCK " + MCK_ID +" are both integers, as required."); - - if ( force_link_checkbox.isSelected() ) { - - Object[] options = {"Yes", "Cancel"}; - int dialogResult = JOptionPane.showOptionDialog(null, "Are you sure you want to overide any existing links to: \nSMK ID:"+SMK_ID+" and create a link to MCK ID: "+MCK_ID+"? \nPlease be sure you want to do this. 
If you are unsure, click 'Cancel'.", - "Confirm SMK-MCK link override", - JOptionPane.YES_NO_OPTION, - JOptionPane.PLAIN_MESSAGE, - null, options, options[1]); - if (dialogResult == JOptionPane.YES_OPTION){ - try { - - String link_command = "from MenuAwareMonitoringStandalone import MenuAwareMonitoringStandalone;mam = MenuAwareMonitoringStandalone('TRIGGERDBREPR');mam.force_deactivate_all_links_for_smk("+Integer.toString(SMK_ID)+",True);"; - ProcessBuilder submit_link = new ProcessBuilder("python", "-c", link_command); - - Process submit_link_proc = submit_link.start(); - - InputStream link_stdout = submit_link_proc.getInputStream(); - InputStream link_stderr = submit_link_proc.getErrorStream(); - Thread link_threadOut = new Thread( new MyInputStreamSink( link_stdout, "out", GUI_messenger )); - Thread link_threadErr = new Thread( new MyInputStreamSink( link_stderr, "err", null )); - link_threadOut.setDaemon(true); - link_threadErr.setDaemon(true); - link_threadOut.setName( String.format("link_stdout reader" )); - link_threadErr.setName( String.format("link_stderr reader" )); - link_threadOut.start(); - link_threadErr.start(); - - } catch (IOException err) { - } - } - else if (dialogResult == JOptionPane.NO_OPTION){ - GUI_messenger.append("\nForce upload cancelled."); - force_link_checkbox.setSelected(false); - } - - } - Object[] options = {"Yes", "Cancel"}; - int dialogResult = JOptionPane.showOptionDialog(null, "Are you sure you want to link: \nSMK ID:"+SMK_ID+" with MCK ID: "+MCK_ID+"\nwith comment: "+MCK_SMK_link_comment, - "Confirm SMK-MCK link", - JOptionPane.YES_NO_OPTION, - JOptionPane.PLAIN_MESSAGE, - null, options, options[1]); - if (dialogResult == JOptionPane.YES_OPTION){ - try { - - String link_command = "from MenuAwareMonitoringStandalone import MenuAwareMonitoringStandalone;mam = MenuAwareMonitoringStandalone('TRIGGERDBREPR');mam.link_smk_to_mck("+Integer.toString(SMK_ID)+","+Integer.toString(MCK_ID)+",'"+MCK_SMK_link_comment+"');"; - ProcessBuilder 
submit_link = new ProcessBuilder("python", "-c", link_command); - - Process submit_link_proc = submit_link.start(); - - InputStream link_stdout = submit_link_proc.getInputStream(); - InputStream link_stderr = submit_link_proc.getErrorStream(); - Thread link_threadOut = new Thread( new MyInputStreamSink( link_stdout, "out", GUI_messenger )); - Thread link_threadErr = new Thread( new MyInputStreamSink( link_stderr, "err", null )); - link_threadOut.setDaemon(true); - link_threadErr.setDaemon(true); - link_threadOut.setName( String.format("link_stdout reader" )); - link_threadErr.setName( String.format("link_stderr reader" )); - link_threadOut.start(); - link_threadErr.start(); - - } catch (IOException err) { - } - } - else if (dialogResult == JOptionPane.NO_OPTION){ - GUI_messenger.append("\nLink upload cancelled."); - force_link_checkbox.setSelected(false); - } - } else if ( MCK_SMK_link_comment.contains("\"") || MCK_SMK_link_comment.contains("\"\"\"") || MCK_SMK_link_comment.contains("'''") || MCK_SMK_link_comment.contains("'") || MCK_SMK_link_comment.matches("(.*)\"(.*)") ) { - GUI_messenger.append("\nComment contains \' or \", please remove these characters."); - force_link_checkbox.setSelected(false); - } } - }); - } - - JPanel Make_MCK_panel = new JPanel(new GridLayout(0, 1, 1, 1)); - cp.add(Make_MCK_panel); - { - JPanel Make_MCK_header_panel = new JPanel(); - Make_MCK_panel.add(Make_MCK_header_panel); - JLabel Make_MCK_header = new JLabel("Make a new MCK:"); - Make_MCK_header_panel.add(Make_MCK_header); - - JPanel Make_MCK_subpanel_1 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); - Make_MCK_panel.add(Make_MCK_subpanel_1); - Make_MCK_subpanel_1.add(new JLabel("Comma seperated SMCK list:")); - final JTextArea MCKlistField = new JTextArea(""); - MCKlistField.setEditable(true); - MCKlistField.setLineWrap(true); - MCKlistField.setWrapStyleWord(true); - JScrollPane MCKlistScrollPane = new JScrollPane(MCKlistField); - MCKlistScrollPane.setPreferredSize(new 
Dimension(290, 30)); - Make_MCK_subpanel_1.add(MCKlistScrollPane); - SMCK_list = ""; - - JPanel Make_MCK_subpanel_2 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); - Make_MCK_panel.add(Make_MCK_subpanel_2); - JButton Make_MCK_button = new JButton("Make new MCK"); - Make_MCK_subpanel_2.add(Make_MCK_button); - - Make_MCK_button.addActionListener(new ActionListener() { //Make MCK button pressed - public void actionPerformed(ActionEvent e) { - - SMCK_list = MCKlistField.getText().trim(); - if ( SMCK_list.isEmpty() == true ) { - GUI_messenger.append("\nPlease add a comma seperated list of SMCKs."); - } else { - Object[] options = {"Yes", - "Cancel"}; - int dialogResult = JOptionPane.showOptionDialog(null, "Are you sure you want to join the listed SMCKs?", - "Confirm", - JOptionPane.YES_NO_OPTION, - JOptionPane.PLAIN_MESSAGE, - null, options, options[1]); - if (dialogResult == JOptionPane.YES_OPTION){ - try { - - String Make_MCK_command = "from MenuAwareMonitoringStandalone import MenuAwareMonitoringStandalone;mam = MenuAwareMonitoringStandalone('TRIGGERDBREPR');mam.upload_mck(["+SMCK_list+"]);"; - ProcessBuilder make_mck = new ProcessBuilder("python", "-c", Make_MCK_command); - - Process make_mck_proc = make_mck.start(); - - InputStream make_mck_stdout = make_mck_proc.getInputStream(); - InputStream make_mck_stderr = make_mck_proc.getErrorStream(); - Thread make_mck_threadOut = new Thread( new MyInputStreamSink( make_mck_stdout, "out", GUI_messenger )); - Thread make_mck_threadErr = new Thread( new MyInputStreamSink( make_mck_stderr, "err", null )); - make_mck_threadOut.setDaemon(true); - make_mck_threadErr.setDaemon(true); - make_mck_threadOut.setName( String.format("make_mck_stdout reader" )); - make_mck_threadErr.setName( String.format("make_mck_stderr reader" )); - make_mck_threadOut.start(); - make_mck_threadErr.start(); - - } catch (IOException err) { - } - } - } - } - - }); - - } - - JPanel Clone_MCK_panel = new JPanel(new GridLayout(0, 1, 1, 1)); - 
cp.add(Clone_MCK_panel); - { - JPanel Clone_MCK_header_panel = new JPanel(); - Clone_MCK_panel.add(Clone_MCK_header_panel); - JLabel Clone_MCK_header = new JLabel("Clone an MCK for use in a different release:"); - Clone_MCK_header_panel.add(Clone_MCK_header); - - JPanel Clone_MCK_subpanel_1 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); - Clone_MCK_panel.add(Clone_MCK_subpanel_1); - Clone_MCK_subpanel_1.add(new JLabel("MCK ID:")); - final JTextField MCK_ID_field = new JTextField(5); - Clone_MCK_subpanel_1.add(MCK_ID_field); - clone_MCK_ID = 0; - Clone_MCK_subpanel_1.add(new JLabel("ATLAS project:")); - final JTextField release_field = new JTextField(8); - Clone_MCK_subpanel_1.add(release_field); - clone_release = ""; - Clone_MCK_subpanel_1.add(new JLabel("ATLAS version:")); - final JTextField version_field = new JTextField(8); - Clone_MCK_subpanel_1.add(version_field); - clone_version = ""; - - JPanel Clone_MCK_subpanel_2 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); - Clone_MCK_panel.add(Clone_MCK_subpanel_2); - JButton Clone_MCK_button = new JButton("Clone MCK"); - Clone_MCK_subpanel_2.add(Clone_MCK_button); - - Clone_MCK_button.addActionListener(new ActionListener() { //Clone MCK button pressed - public void actionPerformed(ActionEvent e) { - - Boolean GoodValsFlag = true; //we have good MCK and SMK values unless they are found to be bad - try { clone_MCK_ID = Integer.parseInt(MCK_ID_field.getText().trim()); } - catch (NumberFormatException nfe) { GUI_messenger.append("\nBad MCK ID, must be an integer. 
"); GoodValsFlag = false; } - - clone_release = release_field.getText().trim(); - clone_version = version_field.getText().trim(); - if ( clone_release.isEmpty() == true || clone_version.isEmpty() ) { - GUI_messenger.append("\nPlease add the release and version that you want the new MCK to be valid in."); - } else { - if ( GoodValsFlag == true ) { - Object[] options = {"Yes", - "Cancel"}; - int dialogResult = JOptionPane.showOptionDialog(null, "Are you sure you want to clone the entered MCK?", - "Confirm", - JOptionPane.YES_NO_OPTION, - JOptionPane.PLAIN_MESSAGE, - null, options, options[1]); - if (dialogResult == JOptionPane.YES_OPTION){ - try { - - String Clone_MCK_command = "from MenuAwareMonitoringStandalone import MenuAwareMonitoringStandalone;mam = MenuAwareMonitoringStandalone('TRIGGERDBREPR');mam.clone_mck_for_new_release("+clone_MCK_ID+",'"+clone_release+"','"+clone_version+"');"; - ProcessBuilder clone_mck = new ProcessBuilder("python", "-c", Clone_MCK_command); - - Process clone_mck_proc = clone_mck.start(); - - InputStream clone_mck_stdout = clone_mck_proc.getInputStream(); - InputStream clone_mck_stderr = clone_mck_proc.getErrorStream(); - Thread clone_mck_threadOut = new Thread( new MyInputStreamSink( clone_mck_stdout, "out", GUI_messenger )); - Thread clone_mck_threadErr = new Thread( new MyInputStreamSink( clone_mck_stderr, "err", null )); - clone_mck_threadOut.setDaemon(true); - clone_mck_threadErr.setDaemon(true); - clone_mck_threadOut.setName( String.format("clone_mck_stdout reader" )); - clone_mck_threadErr.setName( String.format("clone_mck_stderr reader" )); - clone_mck_threadOut.start(); - clone_mck_threadErr.start(); - - } catch (IOException err) { - } - } - } - } - } - }); - - } - - JPanel Upload_json_panel = new JPanel(new GridLayout(0, 1, 1, 1)); - cp.add(Upload_json_panel); - { - JPanel json_header_panel = new JPanel(); - Upload_json_panel.add(json_header_panel); - JLabel json_header = new JLabel("Upload a .json file to the database:"); - 
json_header_panel.add(json_header); - - JPanel Upload_json_subpanel_1 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); - Upload_json_panel.add(Upload_json_subpanel_1); - Upload_json_subpanel_1.add(new JLabel(".json file to upload:")); - final JFileChooser jsonChooser = new JFileChooser(); - final JTextField jsonFilenameField = new JTextField(32); - jsonFilenameField.setEditable(false); - Upload_json_subpanel_1.add(jsonFilenameField); - - JPanel Upload_json_subpanel_2 = new JPanel(new FlowLayout(FlowLayout.CENTER,1,1)); - Upload_json_panel.add(Upload_json_subpanel_2); - JButton jsonChooserButton = new JButton("Choose file"); - Upload_json_subpanel_2.add(jsonChooserButton); - final JButton submitFile = new JButton("Submit file"); - submitFile.setEnabled(false); //Cannot submit file until a file has been picked - Upload_json_subpanel_2.add(submitFile); - - jsonChooserButton.addActionListener(new ActionListener() { //Choose file button pressed - public void actionPerformed(ActionEvent e) { - int chooserResult = jsonChooser.showOpenDialog((Component)e.getSource()); //Open file chooser - if (chooserResult == JFileChooser.APPROVE_OPTION) { - jsonSelectedFile = jsonChooser.getSelectedFile(); - jsonFilename = jsonSelectedFile.getAbsolutePath(); - jsonFilenameField.setText(jsonFilename+"\n"); - submitFile.setEnabled(true); - } - } - }); - - submitFile.addActionListener(new ActionListener() { //Submit file button pressed - public void actionPerformed(ActionEvent e) { - - if (jsonSelectedFile.exists() && !jsonSelectedFile.isDirectory() /*&& release != null*/ ) { - String extension = jsonFilename.substring(jsonFilename.lastIndexOf("."),jsonFilename.length()); - String file_with_extension = jsonFilename.substring(jsonFilename.lastIndexOf("/")+1,jsonFilename.length()); - if (".json".equalsIgnoreCase(extension)) { - GUI_messenger.append("\nGood file extension "+extension+"."); - - Object[] options = {"Yes","Cancel"}; - - int dialogResult = JOptionPane.showOptionDialog(null, 
"Are you sure you want to upload "+file_with_extension+" to the database?", - - "Confirm file upload", - JOptionPane.YES_NO_OPTION, - JOptionPane.PLAIN_MESSAGE, - null, options, options[1]); - if (dialogResult == JOptionPane.YES_OPTION){ - try { - - String json_command = "from MenuAwareMonitoringStandalone import MenuAwareMonitoringStandalone;mam = MenuAwareMonitoringStandalone('TRIGGERDBREPR');mam.upload_config_from_json('"+jsonFilename+"');"; - ProcessBuilder submit_json = new ProcessBuilder("python", "-c", json_command); - - Process submit_json_proc = submit_json.start(); - - InputStream json_stdout = submit_json_proc.getInputStream(); - InputStream json_stderr = submit_json_proc.getErrorStream(); - Thread json_threadOut = new Thread( new MyInputStreamSink( json_stdout, "out", GUI_messenger )); - Thread json_threadErr = new Thread( new MyInputStreamSink( json_stderr, "err", null )); - json_threadOut.setDaemon(true); - json_threadErr.setDaemon(true); - json_threadOut.setName( String.format("json_stdout reader" )); - json_threadErr.setName( String.format("json_stderr reader" )); - json_threadOut.start(); - json_threadErr.start(); - - } catch (IOException err) { - } - } - else if (dialogResult == JOptionPane.NO_OPTION){ - GUI_messenger.append("\n"+file_with_extension+" upload cancelled."); - } - } - else GUI_messenger.append("\nWrong file extension ("+extension+"): require .json."); - } - } - }); - } - - JPanel Messenger_panel = new JPanel(new GridLayout(0, 1, 1, 1)); - cp.add(Messenger_panel); - { - GUI_messenger = new JTextArea(""); - GUI_messenger.setEditable(false); - GUI_messenger.setLineWrap(true); - GUI_messenger.setWrapStyleWord(true); - JScrollPane messenger_scrollPane = new JScrollPane(GUI_messenger); - messenger_scrollPane.setPreferredSize(new Dimension(200, 30)); - DefaultCaret GUI_caret = (DefaultCaret)GUI_messenger.getCaret(); - GUI_caret.setUpdatePolicy(DefaultCaret.ALWAYS_UPDATE); - Messenger_panel.add(messenger_scrollPane); - - } - - } - - private 
static class MyInputStreamSink implements Runnable { - private InputStream m_in; - private String m_streamName; - private JTextArea m_outDest; - - MyInputStreamSink( InputStream in, String streamName, JTextArea outDest ) { - m_in = in; - m_streamName = streamName; - m_outDest = outDest; - } - - @Override - public void run() { - BufferedReader reader = null; - Writer writer = null; - - try { - if ( m_outDest != null ) { - String starting_process = "Starting process:"; - m_outDest.append("\n\n"+starting_process); - } - - reader = new BufferedReader( new InputStreamReader( m_in ) ); - - for ( String line = null; ((line = reader.readLine()) != null); ) { - if ( print_to_terminal == true ) System.out.println(line); - if ( m_outDest != null ) { - if ( line.indexOf("Py:Athena") == -1 ) { - m_outDest.append("\n"); - m_outDest.append(line); - } - } - } - } catch (IOException e) { - System.out.println( "Unexpected I/O exception reading from process." ); - } /*catch (BadLocationException ble) { - }*/ - finally { - try { - if ( null != reader ) reader.close(); - } - catch ( java.io.IOException e ) { - System.out.println( "Unexpected I/O exception closing a stream." 
); - } - } - } - } - - - - public static void main(String[] args) throws InterruptedException, IOException { - - // Run the GUI construction in the Event-Dispatching thread for thread-safety - SwingUtilities.invokeLater(new Runnable() { - @Override - public void run() { - try { - TrigMaMGUI_TRIGGERDBREPR frame = new TrigMaMGUI_TRIGGERDBREPR(); - frame.setVisible(true); - } catch (Exception e) { - e.printStackTrace(); - } - } - }); - } -} diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/java/start_TrigMaMGUI.sh b/Trigger/TrigMonitoring/TrigHLTMonitoring/java/TrigMaMGUI_P1.sh similarity index 60% rename from Trigger/TrigMonitoring/TrigHLTMonitoring/java/start_TrigMaMGUI.sh rename to Trigger/TrigMonitoring/TrigHLTMonitoring/java/TrigMaMGUI_P1.sh index 4025f5545d6c..69b239abbd2f 100644 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/java/start_TrigMaMGUI.sh +++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/java/TrigMaMGUI_P1.sh @@ -1,16 +1,13 @@ # TO RUN THE GUI, PLEASE RUN THE COMMAND: -# source start_TrigMaMGUI.sh -# This script assumes the following file locations, relative to the location of the script: -# MenuAwareMonitoringStandalone.py and OracleInterface.py are in ../python -# and ./GUI/TrigMaMGUI.java +# source TrigMaMGUI_P1.sh echo echo " Trigger Menu Aware Monitoring Graphical User Interface" echo " by Xanthe Hoad xanthe.hoad@cern.ch" -echo " For more info about Menu Aware Monitoring see" +echo " For more info about Menu Aware Monitoring see" echo " https://twiki.cern.ch/twiki/bin/view/Atlas/MaDQM" echo -source /sw/atlas/AtlasSetup/scripts/asetup.sh p1hlt,20.2.3.2,gcc48 +source /sw/atlas/AtlasSetup/scripts/asetup.sh p1hlt,20.11.0.23,here export MAM_CORAL_DBLOOKUP_PATH=/det/tdaq/hlt/mam/authentication/ export MAM_CORAL_AUTH_PATH=$MAM_CORAL_DBLOOKUP_PATH @@ -26,4 +23,8 @@ else export PYTHONPATH=$MaMPythonFilesPath:$PYTHONPATH fi -java -cp $SCRIPTDIR/GUI: TrigMaMGUI \ No newline at end of file +#Uncomment the following line to recompile the GUI +#WARNING: If 
compiling, please call the script from an empty directory +#The java compiler at P1 scrambles files when run in a non-empty directory +#javac $SCRIPTDIR/GUI/TrigMaMGUI.java +java -cp $SCRIPTDIR/GUI: TrigMaMGUI TRIGGERDBR2MAM diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/java/TrigMaMGUI_TRIGGERDBREPR.sh b/Trigger/TrigMonitoring/TrigHLTMonitoring/java/TrigMaMGUI_TRIGGERDBREPR.sh new file mode 100755 index 000000000000..8e38a7f89c3f --- /dev/null +++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/java/TrigMaMGUI_TRIGGERDBREPR.sh @@ -0,0 +1,31 @@ +# TO RUN THE GUI, PLEASE RUN THE COMMAND: +# source TrigMaMGUI_TRIGGERDBREPR.sh + +if [ -z "$AtlasProject" ] +then + echo "Please setup a release first." + echo "> setupATLAS" + echo "> asetup ..." + return 1 +fi + +echo +echo " Trigger Menu Aware Monitoring Graphical User Interface" +echo " for use with TRIGGERDBREPR" +echo " by Xanthe Hoad xanthe.hoad@cern.ch" +echo " For more info about Menu Aware Monitoring see" +echo " https://twiki.cern.ch/twiki/bin/view/Atlas/MaDQM" +echo + +if [[ $(python -V 2>&1) == *"2.6"* ]] +then + echo " MAM needs Python version > 2.7, setting up..." 
+ echo + setupATLAS + lsetup python +fi + +SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +javac $SCRIPTDIR/../java/TrigMaMGUI.java +java -cp $SCRIPTDIR/../java: TrigMaMGUI TRIGGERDBREPR diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/java/start_TrigMaMGUI_TRIGGERDBREPR.sh b/Trigger/TrigMonitoring/TrigHLTMonitoring/java/start_TrigMaMGUI_TRIGGERDBREPR.sh deleted file mode 100644 index fca58ff70f1f..000000000000 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/java/start_TrigMaMGUI_TRIGGERDBREPR.sh +++ /dev/null @@ -1,43 +0,0 @@ -# TO RUN THE GUI, PLEASE RUN THE COMMAND: -# source start_TrigMaMGUI_TRIGGERDBREPR.sh -# This script assumes the following file locations, relative to the location of the script: -# MenuAwareMonitoringStandalone.py and OracleInterface.py are in ../python -# and ./GUI/TrigMaMGUI_TRIGGERDBREPR.java - -if [ -z "$AtlasPatch" ] -then -echo "Please setup a release first." -echo "> setupATLAS" -echo "> asetup ..." -return 1 -fi - -echo -echo " Trigger Menu Aware Monitoring Graphical User Interface" -echo " for use with TRIGGERDBREPR" -echo " by Xanthe Hoad xanthe.hoad@cern.ch" -echo " For more info about Menu Aware Monitoring see" -echo " https://twiki.cern.ch/twiki/bin/view/Atlas/MaDQM" -echo - -if [[ $(python -V 2>&1) == *"2.6"* ]] -then - echo " MAM needs Python version > 2.7, setting up..." 
- echo - setupATLAS - lsetup python -fi - -SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -MaMPythonFilesPath=${SCRIPTDIR%/*}/python - -if [[ $PYTHONPATH == *$MaMPythonFilesPath* ]] -then - echo $MaMPythonFilesPath "is in PYTHONPATH" -else - echo "Adding" $MaMPythonFilesPath "to PYTHONPATH" - export PYTHONPATH=$MaMPythonFilesPath:$PYTHONPATH -fi - -javac $SCRIPTDIR/GUI/TrigMaMGUI_TRIGGERDBREPR.java -java -cp $SCRIPTDIR/GUI: TrigMaMGUI_TRIGGERDBREPR \ No newline at end of file diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/python/HLTMonTriggerList.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/HLTMonTriggerList.py index cbad01525ec9..eaff169c75a1 100644 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/python/HLTMonTriggerList.py +++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/HLTMonTriggerList.py @@ -15,18 +15,21 @@ if not 'jobproperties' in dir(): if not 'rec' in dir(): from RecExConfig.RecFlags import rec +if not 'DQMonFlags' in dir(): + from AthenaMonitoring.DQMonFlags import DQMonFlags + class HLTMonTriggerList: - + # HLTMonTriggerList config _configured = False - + # running mode config _get_monitoring_mode_success = False + mc_mode = False pp_mode = False - pPb_mode = False HI_mode = False cosmic_mode = False - + # trigger lists monitoring_mujet = [] monitoring_bjet = [] @@ -58,90 +61,83 @@ class HLTMonTriggerList: monitoring_muon_Support = [] monitoring_tau = [] monitoring_singleTau = [] - - + + def __init__(self): if not self._configured: self.config() - + def config(self): - + self.set_HLTMonTrigList_default() - + self._get_monitoring_mode_success = self.get_monitoring_mode() - - #print self._get_monitoring_mode_success - #self._get_monitoring_mode_success = False - + if self._get_monitoring_mode_success == False: # what should be done in this case? print "HLTMonTriggerList: Error getting monitoring mode, default monitoring lists will be used." 
- - elif self.pp_mode == True: + + elif self.pp_mode == True: print "HLTMonTriggerList: Setting up pp monitoring." self.set_HLTMonTrigList_pp() + elif self.mc_mode == True: + print "HLTMonTriggerList: Setting up MC monitoring." + self.set_HLTMonTrigList_mc() + elif self.HI_mode == True: print "HLTMonTriggerList: Setting up HI monitoring." self.set_HLTMonTrigList_HI() - elif self.pPb_mode == True: - print "HLTMonTriggerList: Setting up pPb monitoring." - self.set_HLTMonTrigList_pPb() - - elif self.cosmic_mode == True: + elif self.cosmic_mode == True: print "HLTMonTriggerList: Setting up cosmic monitoring." self.set_HLTMonTrigList_cosmic() - + self._configured = True - + # Implementation of https://its.cern.ch/jira/browse/ATR-13200 def get_monitoring_mode(self): # Set monitoring mode - if jobproperties.Beam.beamType() == 'collisions': - - if rec.doHeavyIon == False and rec.doHIP == False: - self.pp_mode = True - return True - - if rec.doHeavyIon == True and rec.doHIP == False: - self.HI_mode = True - return True - - if rec.doHeavyIon == False and rec.doHIP == True: - self.pPb_mode = True - return True - - elif jobproperties.Beam.beamType() == 'cosmics': - - if rec.doHeavyIon == False and rec.doHIP == False: - self.cosmic_mode = True - return True - - elif jobproperties.Beam.beamType() == 'singlebeam': - # what should be done in this case? 
- pass + self.data_type = DQMonFlags.monManDataType() + + if self.data_type == 'monteCarlo': + self.mc_mode = True + return True + + elif self.data_type == 'collisions': + #singlebeam is included in collisions + self.pp_mode = True + return True + + elif self.data_type == 'heavyioncollisions': + #heavy ion is pPb or PbPb + self.HI_mode = True + return True + + elif self.data_type == 'cosmics': + self.cosmic_mode = True + return True + + else: + return False - return False - # Config of default/running mode independent monitoring lists def set_HLTMonTrigList_default(self): # Monitoring lists (defaults) which are independent of running mode: # These are the trigger lists that will be used unless they are overwritten based on the running mode. In this way the trigger lists can be updated based on running mode with no risk of a signature being left unmonitored - + # set the bjet and mujet triggers to the default values self.monitoring_bjet = bjet.monitoring_bjet self.monitoring_mujet = bjet.monitoring_mujet - + # set the bphys triggers to the default values self.monitoring_bphys = bphys.monitoring_bphys self.primary_bphys = bphys.primary_bphys - + # set the egamma triggers to the default values self.monitoring_egamma = egamma.monitoring_egamma - self.primary_single_ele = egamma.primary_single_ele self.primary_single_ele_iso = egamma.primary_single_ele_iso self.primary_single_ele_cutbased = egamma.primary_single_ele_cutbased @@ -154,86 +150,94 @@ class HLTMonTriggerList: self.monitoring_Jpsiee = egamma.monitoring_Jpsiee self.primary_single_pho = egamma.primary_single_pho self.primary_double_pho = egamma.primary_double_pho - + # set the jet triggers to the default values self.monitoring_l1jet = jets.monitoring_l1jet self.monitoring_jet = jets.monitoring_jet self.primary_l1jet = jets.primary_l1jet self.primary_jet = jets.primary_jet - + # set the met triggers to the default values self.monitoring_met = met.monitoring_met - + # set the minbias triggers to the default 
values self.monitoring_minbias = minbias.monitoring_minbias - + # set the muon triggers to the default values self.monitoring_muonNonIso = muon.monitoring_muonNonIso self.monitoring_muonIso = muon.monitoring_muonIso self.monitoring_MSonly = muon.monitoring_MSonly self.monitoring_muonEFFS = muon.monitoring_muonEFFS self.monitoring_muon_Support = muon.monitoring_muon_Support - + # set the tau triggers to the default values self.monitoring_tau = tau.monitoring_tau self.monitoring_singleTau = tau.monitoring_singleTau - - # Config of monitoring lists which are dependent on running mode + + # Config of monitoring lists which are dependent on running mode def set_HLTMonTrigList_pp(self): + self.monitoring_bjet = bjet.monitoring_bjet_pp + self.monitoring_mujet = bjet.monitoring_mujet_pp + self.primary_bphys = bphys.primary_bphys_pp - self.monitoring_tau = tau.monitoring_tau_pp - self.monitoring_met = met.monitoring_met_pp - self.monitoring_l1jet = jets.monitoring_l1jet_pp self.monitoring_jet = jets.monitoring_jet_pp self.primary_l1jet = jets.primary_l1jet_pp self.primary_jet = jets.primary_jet_pp + self.monitoring_met = met.monitoring_met_pp + self.monitoring_muonNonIso = muon.monitoring_muonNonIso_pp self.monitoring_muonIso = muon.monitoring_muonIso_pp - self.monitoring_MSonly = muon.monitoring_MSonly_pp + self.monitoring_MSonly = muon.monitoring_MSonly_pp self.monitoring_muonEFFS = muon.monitoring_muonEFFS_pp self.monitoring_muon_Support = muon.monitoring_muon_Support_pp + self.monitoring_tau = tau.monitoring_tau_pp + def set_HLTMonTrigList_HI(self): - self.primary_bphys = bphys.primary_bphys_hi + self.monitoring_bjet = bjet.monitoring_bjet_hi + self.monitoring_mujet = bjet.monitoring_mujet_hi - self.monitoring_tau = tau.monitoring_tau_pp - self.monitoring_met = [] + self.primary_bphys = bphys.primary_bphys_hi self.monitoring_l1jet = jets.monitoring_l1jet_hi self.monitoring_jet = jets.monitoring_jet_hi self.primary_l1jet = jets.primary_l1jet_hi self.primary_jet = 
jets.primary_jet_hi - self.monitoring_muonNonIso = muon.monitoring_muonNonIso_HI - self.monitoring_muonIso = muon.monitoring_muonIso_HI - self.monitoring_MSonly = muon.monitoring_MSonly_HI - self.monitoring_muonEFFS = muon.monitoring_muonEFFS_HI - self.monitoring_muon_Support = muon.monitoring_muon_Support_HI - - def set_HLTMonTrigList_pPb(self): - self.monitoring_l1jet = jets.monitoring_l1jet_hi - self.monitoring_jet = jets.monitoring_jet_hi - self.primary_l1jet = jets.primary_l1jet_hi - self.primary_jet = jets.primary_jet_hi + self.monitoring_met = [] self.monitoring_muonNonIso = muon.monitoring_muonNonIso_HI self.monitoring_muonIso = muon.monitoring_muonIso_HI - self.monitoring_MSonly = muon.monitoring_MSonly_HI + self.monitoring_MSonly = muon.monitoring_MSonly_HI self.monitoring_muonEFFS = muon.monitoring_muonEFFS_HI self.monitoring_muon_Support = muon.monitoring_muon_Support_HI def set_HLTMonTrigList_cosmic (self): - self.monitoring_tau = tau.monitoring_tau_cosmic - self.monitoring_met = met.monitoring_met_cosmic + self.monitoring_bjet = bjet.monitoring_bjet_cosmic self.monitoring_l1jet = jets.monitoring_l1jet_cosmic self.monitoring_jet = jets.monitoring_jet_cosmic self.primary_l1jet = jets.primary_l1jet_cosmic self.primary_jet = jets.primary_jet_cosmic - + + self.monitoring_met = met.monitoring_met_cosmic + + self.monitoring_tau = tau.monitoring_tau_cosmic + + + def set_HLTMonTrigList_mc(self): + self.monitoring_bjet = bjet.monitoring_bjet_validation + + self.monitoring_l1jet = jets.monitoring_l1jet_validation + self.monitoring_jet = jets.monitoring_jet_validation + self.primary_l1jet = jets.primary_l1jet_validation + self.primary_jet = jets.primary_jet_validation + + self.monitoring_tau = tau.monitoring_tau_validation + hltmonList = HLTMonTriggerList() diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/python/MenuAwareMonitoring.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/MenuAwareMonitoring.py index 4e14804e31d9..68c690f1e8bf 100644 --- 
a/Trigger/TrigMonitoring/TrigHLTMonitoring/python/MenuAwareMonitoring.py +++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/MenuAwareMonitoring.py @@ -1,28 +1,27 @@ # Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration # -# Authors: Ben Smart (Ben.Smart@cern.ch), Xanthe Hoad (Xanthe.Hoad@cern.ch) +# Authors: Ben Smart (Ben.Smart@cern.ch), Xanthe Hoad (Xanthe.Hoad@cern.ch) # -import sys,os -# import Athena-less MaM +import sys,os,shutil,re from TrigHLTMonitoring.MenuAwareMonitoringStandalone import MenuAwareMonitoringStandalone -# import oracle interaction class from TrigHLTMonitoring.OracleInterface import OracleInterface -# import tool interrogator from TrigHLTMonitoring.ToolInterrogator import ToolInterrogator # needed to access the tools in ToolSvc from AthenaCommon.AppMgr import ToolSvc # use rec to find out if we are making ESDs and/or AODs from RecExConfig.RecFlags import rec -# import subprocess. Required to get Athena version (there must be a better way!) -import subprocess +# import subprocess and uuid, used to get default configurations +import subprocess,uuid # import hash library for generating smck config hashes import hashlib # import json for converting configuration dictionaries into strings for hashing import json # for getting connection details from xml.dom import minidom +# for monitoring mode setup +from AthenaMonitoring.DQMonFlags import DQMonFlags # all Menu-Aware Monitoring stuff in one class # it uses OracleInterface to talk to the Oracle database @@ -44,38 +43,29 @@ class MenuAwareMonitoring: # flag so that diff instruction are only printed once self.firstdiff = True - - # create MaMStandalone interaction object (includes connecting to oracle) + + # create MaMStandalone interaction object (includes connecting to oracle) self.ms = MenuAwareMonitoringStandalone(alias,database_username,database_password,database_name,database_directory) - + # get athena version self.__get_athena_version__() - + # holder for stream (bulk 
or express) self.stream = "" self.__get_stream__() # print guide for user if this is an interactive session if self.ms.__is_session_interactive__(): - + print "Running in Athena release",self.ms.current_athena_version print "Stream detected:",self.stream - print "" # create tool interrogator object self.ti = ToolInterrogator() - # need to grab ms.local_global_info and make all methods act on that - - # pointer to local tool info - self.local = self.ms.local_global_info['MONITORING_TOOL_DICT'] - - # automatically fill current local tool info - self.get_current_local_info() - - # fill default global info (if available) - #if self.ms.connected_to_oracle == True: - #self.ms.get_default_from_db(self.ms.current_athena_version) + # flag to prevent multiple calls to setup_all_local_tools, as it doesn't seem like the current config is picked up is this is tried + self.tools_setup = False + self.setup_mode = "" def __quiet_output__(self): @@ -99,10 +89,10 @@ class MenuAwareMonitoring: The local variable <ThisVariable>.stream is set to the result.""" # set self.stream to the output of rec.triggerStream() - # this will equal 'EXPRESS' in the case of express stream, and 'BULK' in the case of the bulk stream. + # this will equal 'EXPRESS' in the case of express stream, and 'BULK' in the case of the bulk stream. # rec.triggerStream() seems to be able to take on many other values. - # - # if extra options are desired as valid user input to match this variable, + # + # if extra options are desired as valid user input to match this variable, # then thy just need to be added to the list of valid inputs in the function # ms.__ask_for_processing_stream__() # in the list valid_input @@ -110,6 +100,67 @@ class MenuAwareMonitoring: self.stream = str(rec.triggerStream()).upper() + def __get_athena_version__(self): + "Get the current Athena version." 
+ + self.ms.current_athena_version = self.get_release_setup().replace(",","-") + + + def get_release_setup(self): + """Get the full current release setup in a form that can be passed to asetup. + Taken/modified from RunTier0Tests.py""" + + # rel20 version + if 'AtlasPatch' in os.environ: + project = os.environ['AtlasPatch'] # need this to pick up AtlasProd1 in cmt builds + + if 'AtlasPatchVersion' in os.environ: + current_nightly = os.environ['AtlasPatchVersion'] + elif 'AtlasArea' in os.environ: + current_nightly = os.environ['AtlasArea'].split('/')[-1] + elif 'AtlasVersion' in os.environ: + current_nightly = os.environ['AtlasVersion'] + + if "rel" not in current_nightly: + setup = "%s,%s"%(project,current_nightly) + # 'AtlasProduction,20.7.9.8' + else: + + if 'AtlasBuildBranch' in os.environ: + release = os.environ['AtlasBuildBranch'] + else: + release = os.environ['ATLAS_RELEASE_BASE'] + if 'afs' in release.split('/'): + release = release.split('/')[-1] + elif 'cvmfs' in release.split('/'): + release = release.split('/')[-2] + + setup="%s,%s,%s"%(project,release,current_nightly) + # 'AtlasProduction,20.7.X.Y-VAL,rel_3' + + # rel21 version + else: + if 'AtlasProject' in os.environ: + project = os.environ['AtlasProject'] # no patches in rel21 + + release = "" + if 'AtlasVersion' in os.environ: + release = os.environ['AtlasVersion'] + + if "rel" in release: + current_nightly = release + release = os.environ['AtlasBuildBranch'] + setup="%s,%s,%s"%(project,release,current_nightly) + elif 'AtlasBuildStamp' in os.environ: + release = os.environ['AtlasBuildBranch'] + current_nightly = os.environ['AtlasBuildStamp'] # denotes a cmakegit nightly + setup="%s,%s,%s"%(project,release,current_nightly) + else: + setup="%s,%s"%(project,release) + + return setup + + def __get_tag__(self,package=""): """Get the tag of the input package. 
If the trunk/HEAD of this package is being used, the svn revision number will also be returned.""" @@ -118,41 +169,79 @@ class MenuAwareMonitoring: if package == "": return "" - # if the package is checked out locally, then find it - bash_command = "echo $(if [ -e $TestArea/InstallArea/include/"+package+"/"+package+"/ ] ; then cat $(cat $TestArea/InstallArea/include/"+package+"/"+package+".cmtref )/../.svn/entries | grep -m 1 -B 1 \"svn+ssh\"; fi) | sed \"s@ @ URL: @\" " - local_version = subprocess.check_output( bash_command , shell=True).replace("\n","") - - # get the tag (only valid if a non-local package is being used!) - bash_command = "for d in $(echo $JOBOPTSEARCHPATH | sed \"s@:@ @g\"); do if [ -e $d/../include/"+package+"/"+package+"/ ]; then cat $d/../../Trigger/TrigMonitoring/"+package+"/cmt/version.cmt; fi; done | grep -m 1 \""+package+"\" " - package_tag = subprocess.check_output( bash_command , shell=True).replace("\n","") - - # if something local has been found and it is the trunk, then return that, otherwise return the package_tag - if local_version != "": - if local_version.__contains__("trunk"): - # string to return - return_str = "Revision: "+local_version - return return_str - if package_tag != "": - return package_tag + if 'CMTPATH' in os.environ: + # for cmt releases + cmd = ['cmt', 'show', 'packages'] + cmtProc = subprocess.Popen(cmd, shell = False, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, bufsize = 1) + cmtOut = cmtProc.communicate()[0] + for line in cmtOut.split('\n'): + try: + # could make this faster using re + if line.strip() == '': + continue + (packageName, packageVersion, packagePath) = line.split() + if packageName == package: + return packageVersion + except ValueError: + print "Warning, unusual output from cmt: %s\n" % line + elif 'CMAKE_PREFIX_PATH' in os.environ: + # for cmake releases + cpath = os.getenv("CMAKE_PREFIX_PATH") + for cdir in cpath.split( ":" ): + # Look for packages.txt + cfilename = os.path.join( 
cdir, "packages.txt" ) + try: + cfile = open( cfilename, "r" ) + except: + #print "Could not open file: %s" % ( cfile ) + continue + # look for the package + m = re.search( "[^\n]*/%s [^\n]+" % package, cfile.read() ) + if m: + #print "Release directory: %s" % (cdir) + packageVersion = m.group( 0 ).split()[1] + return packageVersion + + # if we get this far, the packageVersion hasn't been found + return "" + + + def find_and_list_patch_packages(self): + """Find the patch packages the user has set up. + Taken/modified from RunTier0Tests.py""" + + npackages = 0 + if 'CMTPATH' in os.environ: + if 'TestArea' in os.environ and os.access(os.environ['TestArea'], os.R_OK): + print "Patch packages in your InstallArea are:\n" + cmd = ['cmt', 'show', 'packages', os.environ['TestArea']] + cmtProc = subprocess.Popen(cmd, shell = False, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, bufsize = 1) + cmtOut = cmtProc.communicate()[0] + for line in cmtOut.split('\n'): + try: + if line.strip() == '': + continue + (package, packageVersion, packagePath) = line.split() + print '\t%s\n' % (packageVersion) + npackages = npackages+1 + except ValueError: + print "Warning, unusual output from cmt: %s\n" % line + elif 'WorkDir_DIR' in os.environ : + # does this work for git packages? + print "Patch packages in your build are:\n" + myfilepath = os.environ['WorkDir_DIR'] + fname = str(myfilepath) + '/packages.txt' + npackages = 0 + with open(fname) as fp: + for line in fp: + if '#' not in line: + print line + npackages=npackages+1 else: - return "" - - - def __get_athena_version__(self): - "Get the current Athena version." - - # Probably only want to store the first 4 digits? - # get the current local Athena version (there must be a better way!) 
- AtlasVersion = subprocess.check_output("echo $AtlasVersion", shell=True).replace("\n","") - AtlasProject = subprocess.check_output("echo $AtlasProject", shell=True).replace("\n","") - self.ms.current_athena_version = AtlasProject+"-"+AtlasVersion - - def __update_local_pointer__(self): - """update self.local to point to self.ms.local_global_info['MONITORING_TOOL_DICT'] - Needed if self.ms.local_global_info has been overwritten.""" - - self.ms.__update_local_pointer__() - self.local = self.ms.local + print "A release area with locally installed packages has not been setup." + if npackages == 0: + print "No patches found" + return npackages def list_local_tools_read_in(self): @@ -230,7 +319,7 @@ class MenuAwareMonitoring: def get_current_local_info(self,print_output_here=""): """Use the Tool Interrogator to find locally running trigger-monitoring tools. - These tool configurations are made available in the <ThisVariable>.local dictionary.""" + These tool configurations are made available in the <ThisVariable>.ms.local dictionary.""" # check for empty print_output_here # if it is found, use self.print_output @@ -264,7 +353,7 @@ class MenuAwareMonitoring: # construct smck_info for this tool smck_info = {} - smck_info['SMCK_CONFIG'] = smck_config + smck_info['SMCK_CONFIG'] = smck_config smck_info['SMCK_CONFIG_HASH'] = self.ms.__get_config_hash__(smck_config) smck_info['SMCK_SLICE_TYPE'] = smck_config['SliceType'] smck_info['SMCK_TOOL_TYPE'] = tool @@ -281,169 +370,60 @@ class MenuAwareMonitoring: print "This can be passed to MaM methods with the string '"+tool+"'" print "" - - # update self.local - self.__update_local_pointer__() + self.ms.__update_local_pointer__() # add nice spacing if we have been printing tool info if len(mon_tools) > 0: if print_output_here: - print "The extracted data of all local trigger-monitoring tools is stored in <ThisVariable>.local" + print "The extracted data of all local trigger-monitoring tools is stored in <ThisVariable>.ms.local" 
print "All local trigger-monitoring tools can be passed together as an 'MCK' to MaM diff and search methods with the string 'LOCAL'" print "" - def setup_all_local_tools(self): + def setup_all_local_tools(self,mode=""): "Setup all local trigger-monitoring tools and runs get_current_local_info() to read them in using the Tool Interrogator." - # setup all local packages listed in PackagesToInterrogate via ToolInterrogator - self.ti.load_all_tools() - - # we probably want to read in these tools with the Tool Interrogator - self.get_current_local_info() - - - def upload_smck(self,input1="",processing_step="",comment="",print_output_here=""): - """Upload local configuration for tool 'input1' as an SMCK. - If input1=='local', then all local configuration changes wrt the default will be uploaded. - Optional processing step and comment can be provided.""" - - if self.ms.connected_to_oracle == False: - print "You are not connected to the database, so this function is not available." - return - - if self.replica_db_connection == True: - print "You are connected to the replica database and your connection is read only, so this function is not available to you." + if self.tools_setup == True: + # this is a precautionary measure because it is not clear from testing that tools are resetup correctly if setup is attempted multiple times (probably because tools has protections against this implemented) + print "Tools already set up." + print "Quit and restart to setup again." 
return - - # check for empty print_output_here - # if it is found, use self.print_output - if print_output_here == "": - print_output_here = self.print_output - - # search for default mck - default_mck = self.ms.get_default_mck_id_from_db() - - # if the default does not exist - if default_mck < 0: - # info for user - if print_output_here: - print "No default for this Athena version ("+self.ms.current_athena_version+") has been uploaded" - print "If you are not running with any local changes to the default, then consider running the command \"<ThisVariable>.ms.upload_default()\"" - return - - # if input is local, then run ms.upload_all_local_changes_as_smck() - if input1 == 'local': - - if print_output_here: - print "You have provided the input 'local'. All local changes wrt the default will be uploaded." - self.ms.upload_all_local_changes_as_smck(processing_step,comment) - - # get all local tool info - #self.get_current_local_info() - - # check if input1 is running (has been read in by the above line) - if not self.ms.local_global_info['MONITORING_TOOL_DICT'].__contains__(input1): - - # this tool has not been loaded - # exit and suggest to the user how to start the tool, if they so wish - if print_output_here: - print "Tool",input1,"is not currently set up locally, so can not have its local configuration uploaded as an SMCK." 
- print "To list all local tools currently set up and read in, please run \"<ThisVariable>.list_local_tools_read_in()\"" - print "To set up and read in all trigger monitoring tools locally, please run \"<ThisVariable>.setup_all_local_tools()\"" - return - - # get local smck_info for tool input1 - local_smck_info = self.ms.local_global_info['MONITORING_TOOL_DICT'][input1] - - # get default from database - # (should already have been done during __init__, - # but in case the default has only been uploaded in this session then we check again) - self.ms.get_default_from_db() - - # get default smck_info - default_smck_info = self.ms.default_global_info['MONITORING_TOOL_DICT'][input1] - - # create diff of smck_info - # we want diffed_smck_info2, - # which is the 'patch' to apply to the default to get the current local configuration - diffed_smck_info1, diffed_smck_info2 = self.ms.__calculate_diff__(default_smck_info,local_smck_info,False) - - # if there are no local differences wrt the default, then we upload nothing and exit - if diffed_smck_info2 == {}: - - # info for user - if print_output_here: - print "No local differences have been found with respect to the default SMCK (SMCK_ID="+str(default_smck_info['SMCK_ID'])+") for this tool ("+str(input1)+"), for this Athena version ("+self.ms.current_athena_version+")." - print "Nothing shall be uploaded to the Oracle database as a result." 
- return - - # check if this SMCK already exists - - # if no processing_step is provided, then ask for one - if processing_step=="": - processing_step = self.ms.__ask_for_processing_step__() - - # if no processing_stream is provided, then ask for one - if processing_stream=="": - processing_stream = self.ms.__ask_for_processing_stream__() - - # if no comment is provided, then ask for one - if comment=="": - comment = self.ms.__ask_for_comment__() - - # fill extra smck_info - diffed_smck_info2['SMCK_PROCESSING_STEP'] = processing_step - diffed_smck_info2['SMCK_PROCESSING_STREAM'] = processing_stream - if not diffed_smck_info2.__contains__('SMCK_CONFIG'): - diffed_smck_info2['SMCK_CONFIG'] = {} - diffed_smck_info2['SMCK_CONFIG']['PackageName'] = local_smck_info['SMCK_CONFIG']['PackageName'] - diffed_smck_info2['SMCK_CONFIG']['ToolName'] = local_smck_info['SMCK_CONFIG']['ToolName'] - diffed_smck_info2['SMCK_CONFIG']['ToolSvcName'] = local_smck_info['SMCK_CONFIG']['ToolSvcName'] - if not diffed_smck_info2['SMCK_CONFIG'].__contains__('ToolInfo'): - diffed_smck_info2['SMCK_CONFIG']['ToolInfo'] = {} - diffed_smck_info2['SMCK_CONFIG']['SliceType'] = local_smck_info['SMCK_CONFIG']['SliceType'] - diffed_smck_info2['SMCK_CONFIG']['MonitCategoryName'] = local_smck_info['SMCK_CONFIG']['MonitCategoryName'] - if not diffed_smck_info2['SMCK_CONFIG'].__contains__('MonitCategoryInfo'): - diffed_smck_info2['SMCK_CONFIG']['MonitCategoryInfo'] = {} - diffed_smck_info2['SMCK_CONFIG_HASH'] = self.ms.__get_config_hash__(diffed_smck_info2['SMCK_CONFIG']) - diffed_smck_info2['SMCK_TOOL_TYPE'] = input1 - diffed_smck_info2['SMCK_SLICE_TYPE'] = local_smck_info['SMCK_SLICE_TYPE'] - diffed_smck_info2['SMCK_DEFAULT'] = 0 - diffed_smck_info2['SMCK_ATHENA_VERSION'] = self.ms.current_athena_version - diffed_smck_info2['SMCK_SVN_TAG'] = local_smck_info['SMCK_SVN_TAG'] - diffed_smck_info2['SMCK_CREATOR'] = self.ms.current_user - diffed_smck_info2['SMCK_COMMENT'] = comment - - # if 
tool_value['SMCK_CONFIG']['ToolInfo'] and tool_value['SMCK_CONFIG']['MonitCategoryInfo'] are both empty, then we don't want to include this as a new SMCK - if diffed_smck_info2['SMCK_CONFIG']['ToolInfo'] == {} and diffed_smck_info2['SMCK_CONFIG']['MonitCategoryInfo'] == {}: + # enabling setting of monitoring mode + if mode.lower() == "hi": + mode = "HI" + else: + mode = mode.lower() - # info for user - if print_output_here: - print "No local differences have been found with respect to the default SMCK (SMCK_ID="+str(default_smck_info['SMCK_ID'])+") for this tool ("+str(input1)+"), for this Athena version ("+self.ms.current_athena_version+")." - print "Nothing shall be uploaded to the Oracle database as a result." - return + if mode not in [ "pp", "HI", "cosmic", "mc", "" ]: + print "Unrecognised setup mode: using default" + mode = "" + else: + print "Using mode",mode + + if mode == "pp": + DQMonFlags.monManDataType = 'collisions' + elif mode == "mc": + DQMonFlags.monManDataType = 'monteCarlo' + elif mode == "HI": + DQMonFlags.monManDataType = 'heavyioncollisions' + elif mode == "cosmic": + DQMonFlags.monManDataType = 'cosmics' + print "DQMonFlags.monManDataType() =",DQMonFlags.monManDataType() - # upload smck_info (diffed_smck_info2) - new_smck_id = self.ms.oi.upload_smck(diffed_smck_info2) + # setup all local packages listed in PackagesToInterrogate via ToolInterrogator + self.ti.load_all_tools() - # info for user - if print_output_here: + # read in these tools with the Tool Interrogator + self.get_current_local_info() - # print new smck_id and smck_tool_patch_version - print "This is SMCK (SMCK_ID)",new_smck_id - print "with SMCK_TOOL_PATCH_VERSION",diffed_smck_info2['SMCK_TOOL_PATCH_VERSION'] + self.tools_setup = True + self.setup_mode = mode - # if we are running silently, still return the smck_id and smck_tool_patch_version - # (ie. 
in case this function has been called by another function, which might like to know the smck_id and smck_tool_patch_version) - else: - return new_smck_id, diffed_smck_info2['SMCK_TOOL_PATCH_VERSION'] - def __ask_for_default__(self): """If running interactively, ask user whether this upload is to be a default for an Athena release, or a patch.""" - + # is this session interactive? If not, return "ALL" if self.ms.__is_session_interactive__(): @@ -628,7 +608,7 @@ class MenuAwareMonitoring: # if mck_info == -1 then we've not found an mck, so return False if mck_info == -1: return False - + # else this is a valid mck else: return True @@ -656,12 +636,23 @@ class MenuAwareMonitoring: # if smck_id == -1 then we've not found an smck, so return False if smck_id == -1: return False - + # else this is a valid smck else: return True + def __refresh_HLTMonTriggerList__(self): + """If a slice MonitCategory has been updated, + then this function can be used to update HLTMonTriggerList.""" + + # import HLTMonTriggerList + from TrigHLTMonitoring.HLTMonTriggerList import hltmonList + + # now we rerun hltmonList.config() to update it with all the latest info from all slices + hltmonList.config() + + def diff(self,input1="",flag1="",input2="",flag2="",diff_all=False,print_output_here=""): """Perform a diff between two inputs, and print the results. If diff_all==True, all items that are not identical in both inputs will be returned." 
@@ -997,16 +988,23 @@ class MenuAwareMonitoring: # nice spacing for user print "" + def does_mck_athena_version_match_current_athena_version(self, mck_id): - "Compares the current Athena version to the one of the requested MCK" - + "Compares the current Athena version to the MCK athena version" + mck_info = self.ms.oi.read_mck_info_from_db(mck_id) mck_athena_version = mck_info['MCK_ATHENA_VERSION'] - - # change this to compare first 4 digits + return self.ms.check_compatibility_of_two_release_versions(mck_athena_version,self.ms.current_athena_version) - #return mck_athena_version == self.ms.current_athena_version + + def does_smck_athena_version_match_current_athena_version(self, smck_id): + "Compares the current Athena version to the SMCK athena version" + + smck_info = self.ms.oi.read_smck_info_from_db(smck_id) + smck_athena_version = smck_info['SMCK_ATHENA_VERSION'] + + return self.ms.check_compatibility_of_two_release_versions(smck_athena_version,self.ms.current_athena_version) def apply_mck(self,input1="",print_output_here=""): @@ -1016,19 +1014,20 @@ class MenuAwareMonitoring: print "You are not connected to the database, so this function is not available." return - # check for empty print_output_here - # if it is found, use self.print_output if print_output_here == "": print_output_here = self.print_output - # is the input a valid mck? if not self.__is_input_an_mck__(input1): - - # info for user if print_output_here: print "MCK",input1,"has not been recognised as a valid MCK." return + # check the athena version + mck_athena_version = self.ms.oi.read_mck_info_from_db(input1)['MCK_ATHENA_VERSION'] + if not self.does_mck_athena_version_match_current_athena_version(input1): + print "MCK",input1,"is for Athena version",mck_athena_version,"but MAM is running in",self.ms.current_athena_version,"-> this MCK will not be applied." 
+ return + # get list of smck_id that this mck links to smck_ids = self.ms.oi.read_mck_links_from_db(input1) @@ -1053,7 +1052,7 @@ class MenuAwareMonitoring: # make sure we have the smck_id smck_id = self.ms.__get_smck_id_from_smck_identifier__(input1) - + # if we don't if smck_id == -1: @@ -1068,9 +1067,8 @@ class MenuAwareMonitoring: # get the release this SMCK was created for smck_athena_version = smck_info['SMCK_ATHENA_VERSION'] # compare to our release - if smck_athena_version != self.ms.current_athena_version: - print "SMCK",input1,"is for athena version",smck_athena_version,", but MAM is running in ",self.ms.current_athena_version,". This SMCK will not be applied." - return + if not self.does_smck_athena_version_match_current_athena_version(input1): + print "SMCK",input1,"is for Athena version",smck_athena_version,"but MAM is running in",self.ms.current_athena_version,"-> this SMCK will still be applied." # get the processing step this smck should be used for processing_step = smck_info['SMCK_PROCESSING_STEP'] @@ -1079,7 +1077,7 @@ class MenuAwareMonitoring: # info for user if print_output_here: - print "SMCK",input1,"is for the Athena processing stage '"+processing_step+"', which we are not currently in. This SMCK will not be applied." + print "SMCK",input1,"is for the Athena processing stage '"+processing_step+"' which we are not currently in -> this SMCK will not be applied." return # get the processing stream this smck should be used for @@ -1089,7 +1087,7 @@ class MenuAwareMonitoring: # info for user if print_output_here: - print "SMCK",input1,"is for the Athena processing stream '"+processing_stream+"', which we are not currently using. This SMCK will not be applied." + print "SMCK",input1,"is for the Athena processing stream '"+processing_stream+"', which we are not currently using -> this SMCK will not be applied." 
return # get the ToolSvc_tool_name @@ -1103,7 +1101,7 @@ class MenuAwareMonitoring: # info for user if print_output_here: - print "SMCK",input1," corresponds to the tool",ToolSvc_tool_name,"which is not running locally, so can not be configured with this SMCK." + print "SMCK",input1,"corresponds to the tool",ToolSvc_tool_name,"which is not running locally, so can not be configured with this SMCK." return # get the patch config @@ -1117,7 +1115,7 @@ class MenuAwareMonitoring: # test if the tool has this variable tool_contains_variable = False exec "tool_contains_variable = hasattr(ToolSvc.%s,tool_key)" % (ToolSvc_tool_name) - + # if the tool has this variable if tool_contains_variable: @@ -1162,7 +1160,7 @@ class MenuAwareMonitoring: # check that the values are equal if new_value == tool_value: - + # apply the config for this variable exec "ToolSvc.%s.%s = new_value" % (ToolSvc_tool_name,tool_key) @@ -1205,9 +1203,9 @@ class MenuAwareMonitoring: # test if the monitCategory_object has this variable monitCategory_object_contains_variable = False monitCategory_object_contains_variable = hasattr(monitCategory_object,key) - + # if the monitCategory_object has this variable - if monitCategory_object_contains_variable: + if monitCategory_object_contains_variable: # get the type of the value type_to_set_to = type @@ -1250,7 +1248,7 @@ class MenuAwareMonitoring: # check that the values are equal if new_value == tool_value: - + # apply the config for this variable exec "ToolSvc.%s.%s = new_value" % (ToolSvc_tool_name,tool_key) @@ -1281,70 +1279,67 @@ class MenuAwareMonitoring: print "SMCK",input1,"has been applied as a config patch to tool",ToolSvc_tool_name - def __refresh_HLTMonTriggerList__(self): - """If a slice MonitCategory has been updated, - then this function can be used to update HLTMonTriggerList.""" - - # import HLTMonTriggerList - from TrigHLTMonitoring.HLTMonTriggerList import hltmonList - - # now we rerun hltmonList.config() to update it with all the latest 
info from all slices - hltmonList.config() - def get_mck_id_from_smk(self,input_smk): """Input an SMK, and get an MCK_ID back. If no MCK is found, -1 is returned. - If an MCK of 0 is returned, this is intended to signify + If an MCK of 0 is returned, this is intended to signify that the default tool configurations should be used.""" - + if self.ms.connected_to_oracle == False: print "You are not connected to the database, so this function is not available." return - + # returns an empty list if no MCK or a list of the MCK details - mck_info = self.ms.oi.find_active_smk_to_mck_link(input_smk) + mck_info = self.ms.oi.find_active_smk_to_mck_link(input_smk) if len(mck_info) > 0: return mck_info[0][0] - + # if we've made it this far, then an mck has not been found, so return 0 (no link) return 0 - def get_mck_id_from_smk_old(self,input_smk): - """Input an SMK, and get an MCK_ID back. - If no MCK is found, -1 is returned. - If an MCK of 0 is returned, this is intended to signify - that the default tool configurations should be used. - This is the old version which returns the MCK linked to the next highest - SMK in the case where the SMK is not linked.""" - if self.ms.connected_to_oracle == False: - print "You are not connected to the database, so this function is not available." + def dump_mck_to_json(self,mck_id,output_json_filename=""): + "Dump the contents of an MCK to a json file, including the contents of linked SMCKs" + + if not self.__is_input_an_mck__(mck_id): + print "MCK",mck_id,"has not been recognised as a valid MCK." 
return - # get list of all mck_to_smk links - mck_to_smk_links = [] - mck_to_smk_links = self.ms.oi.get_all_mck_to_smk_links() + if output_json_filename == "": + output_json_filename = "MCK_"+str(mck_id)+".json" - # loop over the list - for link in mck_to_smk_links: + output_file = open( output_json_filename, "w" ) - # only consider active links - if link['ACTIVE'] == '1': + mck_info = self.ms.oi.read_mck_info_from_db(mck_id) + smck_ids = self.ms.oi.read_mck_links_from_db(mck_id) - # the list should be ordered in smk, from largest to smallest - # check if this link's smk is equal to or less than the input_smk - if link['SMK'] <= input_smk: - - # then this is the link we want, so return the mck - return link['MCK'] + mck_dump_info = {} + # datetime.datetime objects are not JSON serializable + # seeing as this info is not used later, we replace with the ctime + mck_info['MCK_CREATION_DATE'] = mck_info['MCK_CREATION_DATE'].ctime() + mck_dump_info['MCK'] = mck_info - # if we've made it this far, then an mck has not been found, so return -1 - return -1 + # need to add rest of MCK info + + #combine the info in the MONITORING_TOOL_DICT + mck_dump_info['MONITORING_TOOL_DICT'] = {} + for smck_id in smck_ids: + smck_info = self.ms.oi.read_smck_info_from_db(smck_id) + smck_info['SMCK_CREATION_DATE'] = smck_info['SMCK_CREATION_DATE'].ctime() + tool_type = smck_info['SMCK_TOOL_TYPE'] + mck_dump_info['MONITORING_TOOL_DICT'][tool_type] = smck_info + + json.dump(mck_dump_info, output_file, ensure_ascii=True, sort_keys=True) + output_file.close() + + + def dump_local_config_to_json(self,output_json_filename="mam_configs.json",processing_step="",processing_stream="",comment="",default=""): + "All locally read-in trigger monitoring tool configurations are output to a file." 
+ + if output_json_filename == "": + output_json_filename="mam_configs.json" - def dump_local_config_to_json(self,output_json_filename="mam_configs.json",processing_step="",processing_stream="",comment="",default="",print_output_here=""): - "All locally read-in trigger monitoring tool configurations are output to a json file." - # create a file-like-object to write the json to output_file = open( output_json_filename , "w" ) @@ -1393,28 +1388,48 @@ class MenuAwareMonitoring: tool_value['SMCK_COMMENT'] = comment # json encode the local global info, and dump it to the output file - json.dump(self.ms.local_global_info, output_file, ensure_ascii=True, sort_keys=True) + json.dump(self.ms.local_global_info, output_file, ensure_ascii=True, sort_keys=True) # close the output file output_file.close() - def diff_json_files(self,input_default_config_file="mam_default_configs.json",input_config_file="mam_configs.json",output_json_filename="mam_patch_configs.json"): - """Input a default config, and an alternative config. A diff will be performed. - The parts of the alternative config that are different to the default config + def make_default_json(self,output_json_filename="",comment=""): + "All locally read-in trigger monitoring tool configurations are output to a file, marked as a default." + + if output_json_filename == "": + output_json_filename = "mam_defaults.json" + + filename = output_json_filename + tempcomment = comment + + self.dump_local_config_to_json(output_json_filename=filename,comment=tempcomment,default=1) + + + def diff_json_files(self,input_default_config_file="",input_config_file="",output_json_filename="mam_diff_configs.json"): + """Input a default config, and an alternative config. A diff will be performed. 
+ The parts of the alternative config that are different to the default config will be returned as a new json file.""" + if input_default_config_file == "" or input_config_file == "": + print "Please specify the input_default_file and input_config_file names." + return + # open all file-like-objects - input_default_file = open( input_default_config_file , "r" ) - input_alt_file = open( input_config_file , "r" ) - output_file = open( output_json_filename , "w" ) + try: + input_default_file = open( input_default_config_file , "r" ) + input_alt_file = open( input_config_file , "r" ) + output_file = open( output_json_filename , "w" ) + except: + print "Unable to open one or more config files." + return # get the input dictionaries input_default_dict = self.ms.oi.__unicode_to_str__( json.load(input_default_file) ) input_alt_dict = self.ms.oi.__unicode_to_str__( json.load(input_alt_file) ) # perform a diff of these dicts - # we want diffed_info2, + # we want diffed_info2, # which is the 'patch' to apply to the default to get the current local configuration diffed_info1, diffed_info2 = self.ms.__calculate_diff__(input_default_dict,input_alt_dict,False) @@ -1425,3 +1440,132 @@ class MenuAwareMonitoring: input_default_file.close() input_alt_file.close() output_file.close() + + + def make_patch_json(self,output_json_filename="",processing_step="",processing_stream="",comment="",CleanRunHeadDir='/tmp/'): + + if not self.find_and_list_patch_packages(): + print "Have not been able to detect you have set up any packages" + print "Continuing anyway..." 
+ + if self.tools_setup == False: + print "Please run mam.setup_all_local_tools(monitoring_mode) first (where monitoring_mode is pp, HI, cosmic or mc)" + return + + # if no processing_step is provided, then ask for one + if processing_step=="": + processing_step = self.ms.__ask_for_processing_step__() + # if no processing_step is provided, then ask for one + if processing_stream=="": + processing_stream = self.ms.__ask_for_processing_stream__() + # if no comment is provided, then ask for one + if comment=="": + comment = self.ms.__ask_for_comment__() + + # need to get release accurately and in a form the command can run - use get_release_setup + release = self.get_release_setup() + + # this is not a default, so we need to get the default and perform the diff before dumping + print "Will get default in clean directory for release",release #release + + if str(CleanRunHeadDir) == "/tmp/": + myUser = os.environ['USER'] + CleanRunHeadDir = "/tmp/"+str(myUser) + + if os.path.exists(CleanRunHeadDir): + print "The head directory used to obtain the default for release",release,"will be",CleanRunHeadDir + else: + print "Please specify a directory that exists for the argument CleanRunHeadDir" + return + + UniqID = str(uuid.uuid4()) + CleanDirName="default_"+UniqID + # print CleanDirName + + default_json_filename = self.ms.current_athena_version+'_default.json' + + # do the getting of the default via a shell here + cmd = ["RunProcessWithMonitor.py","DumpDefaultMonConfig.sh",CleanRunHeadDir,CleanDirName,release,default_json_filename,self.setup_mode] + subprocess.Popen(cmd).communicate() + + cwd = os.getcwd() + try: + shutil.copy2(CleanRunHeadDir+'/'+CleanDirName+'/'+default_json_filename, cwd) + except: + print "Getting default configuration json for",release,"failed" + return + + self.ms.get_default_from_json(default_json_filename) + + # create diff of global_info + # we want diffed_global_info2, which is the 'patch' to apply to the default to get the current local 
configuration + diffed_global_info1, diffed_global_info2 = self.ms.__calculate_diff__(self.ms.default_global_info,self.ms.local_global_info,False) + + # if there are no local differences wrt the default, then we dump nothing and exit + if diffed_global_info2 == {}: + print "No local differences have been found with respect to the default configuration for Athena version "+self.ms.current_athena_version+"." + print "Nothing shall be dumped to json as a result." + return + + # fill extra mck_info + diffed_global_info2['MCK'] = {} + diffed_global_info2['MCK']['MCK_DEFAULT'] = 0 + diffed_global_info2['MCK']['MCK_ATHENA_VERSION'] = self.ms.current_athena_version + diffed_global_info2['MCK']['MCK_CREATOR'] = self.ms.current_user + diffed_global_info2['MCK']['MCK_COMMENT'] = comment + + # in case we want to remove any diffed_global_info2['MONITORING_TOOL_DICT'] items, we must make a list of the keys, + # and then delete these keys after we have finished iterating over diffed_global_info2['MONITORING_TOOL_DICT'] + # It is not possible to delete elements while iterating over a list or dict + keys_to_delete = [] + + # fill extra smck_info for all tools + for tool_key, tool_value in diffed_global_info2['MONITORING_TOOL_DICT'].iteritems(): + + # fill extra smck_info + tool_value['SMCK_PROCESSING_STEP'] = processing_step + tool_value['SMCK_PROCESSING_STREAM'] = processing_stream + if not tool_value.__contains__('SMCK_CONFIG'): + tool_value['SMCK_CONFIG'] = {} + tool_value['SMCK_CONFIG']['PackageName'] = self.ms.local_global_info['MONITORING_TOOL_DICT'][tool_key]['SMCK_CONFIG']['PackageName'] + tool_value['SMCK_CONFIG']['ToolName'] = self.ms.local_global_info['MONITORING_TOOL_DICT'][tool_key]['SMCK_CONFIG']['ToolName'] + tool_value['SMCK_CONFIG']['ToolSvcName'] = self.ms.local_global_info['MONITORING_TOOL_DICT'][tool_key]['SMCK_CONFIG']['ToolSvcName'] + if not tool_value['SMCK_CONFIG'].__contains__('ToolInfo'): + tool_value['SMCK_CONFIG']['ToolInfo'] = {} + 
tool_value['SMCK_CONFIG']['SliceType'] = self.ms.local_global_info['MONITORING_TOOL_DICT'][tool_key]['SMCK_CONFIG']['SliceType'] + tool_value['SMCK_CONFIG']['MonitCategoryName'] = self.ms.local_global_info['MONITORING_TOOL_DICT'][tool_key]['SMCK_CONFIG']['MonitCategoryName'] + if not tool_value['SMCK_CONFIG'].__contains__('MonitCategoryInfo'): + tool_value['SMCK_CONFIG']['MonitCategoryInfo'] = {} + tool_value['SMCK_CONFIG_HASH'] = self.ms.__get_config_hash__(tool_value['SMCK_CONFIG']) + tool_value['SMCK_TOOL_TYPE'] = tool_key + tool_value['SMCK_SLICE_TYPE'] = self.ms.local_global_info['MONITORING_TOOL_DICT'][tool_key]['SMCK_SLICE_TYPE'] + tool_value['SMCK_DEFAULT'] = 0 + tool_value['SMCK_ATHENA_VERSION'] = self.ms.current_athena_version + tool_value['SMCK_SVN_TAG'] = self.ms.local_global_info['MONITORING_TOOL_DICT'][tool_key]['SMCK_SVN_TAG'] + tool_value['SMCK_CREATOR'] = self.ms.current_user + tool_value['SMCK_COMMENT'] = comment + + # if tool_value['SMCK_CONFIG']['ToolInfo'] and tool_value['SMCK_CONFIG']['MonitCategoryInfo'] are both empty, then we don't want to include this as a new SMCK + if tool_value['SMCK_CONFIG']['ToolInfo'] == {} and tool_value['SMCK_CONFIG']['MonitCategoryInfo'] == {}: + keys_to_delete.append(tool_key) + + # if there are any items in keys_to_delete to be deleted from diffed_global_info2['MONITORING_TOOL_DICT'] then delete them now + for tool_key in keys_to_delete: + diffed_global_info2['MONITORING_TOOL_DICT'].__delitem__(tool_key) + + # if there are no items in diffed_global_info2['MONITORING_TOOL_DICT'] then we do not want to upload anything + if len(diffed_global_info2['MONITORING_TOOL_DICT']) == 0: + print "No local differences have been found with respect to the default for Athena version "+self.ms.current_athena_version+"." + print "Nothing shall be dumped to json as a result." 
+ return + + if output_json_filename=="": + output_json_filename = str(self.ms.current_athena_version) + output_json_filename = output_json_filename + "_patch.json" + + # json encode the remaining diffed info, and dump it to the output file + output_file = open( output_json_filename , "w" ) + json.dump(diffed_global_info2, output_file, ensure_ascii=True, sort_keys=True) + output_file.close() + + print "Dumped patch json as",output_json_filename diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/python/MenuAwareMonitoringStandalone.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/MenuAwareMonitoringStandalone.py index 36a0a9793c0d..b7e0981b508b 100644 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/python/MenuAwareMonitoringStandalone.py +++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/MenuAwareMonitoringStandalone.py @@ -9,7 +9,6 @@ import sys,os # import subprocess. Required to get user import subprocess from xml.dom import minidom -# import oracle interaction class from OracleInterface import OracleInterface # import hash library for generating smck config hashes import hashlib @@ -21,7 +20,7 @@ import json class MenuAwareMonitoringStandalone: """Menu-aware Monitoring standalone 'Athena-less' class. 
Provides functionality for the upload/download of trigger-monitoring tool configurations to an Oracle database.""" - + def __init__(self,alias="TRIGGERDBR2MAM",database_username="",database_password="",database_name="",database_directory=""): """Setup Menu-aware Monitoring, find locally running trigger-monitoring tools, @@ -29,7 +28,7 @@ class MenuAwareMonitoringStandalone: and get the current default from the database (if it exists).""" # MaM code version - self.version = '1.3.0' + self.version = '1.4.6' # flag for setting whether to print out anything to screen or not self.print_output = True @@ -44,7 +43,7 @@ class MenuAwareMonitoringStandalone: # Since this code is Athena-independent, we use a dummy 'Athena version' # If called from MenuAwareMonitoring, this will be updated to the real version self.current_athena_version = "MaM P1 GUI" - + # holder for default global_info self.default_global_info = {} @@ -57,25 +56,29 @@ class MenuAwareMonitoringStandalone: # flag to record if we have connected to Oracle self.connected_to_oracle = False - + self.connection_alias = "" + # flag to indicate if we are connected to the replica database with a readonly connection + self.readonly_db_connection = False + # second flag so MAM doesn't break - will remove this later self.replica_db_connection = False + # greetings + print "Welcome to Menu-aware Monitoring (MaM) version",self.version + print "Please report any bugs to xanthe.hoad@cern.ch" + print "You are",self.current_user + # now connect to oracle self.__connect_to_oracle__(alias,database_username,database_password,database_name,database_directory) if self.connected_to_oracle == False: if alias == "TRIGGERDBR2MAM": - print "Exiting." + print "Exiting" sys.exit(1) else: - print "Some Menu Amare Monitoring functions will not be available to you." 
- - # print guide for user - print "WARNING, THIS VERSION OF MAM IS EXPERIMENTAL, PLEASE USE TAG TrigHLTMonitoring-00-07-13" - print "Authors of this code: Ben Smart (Ben.Smart@cern.ch), Xanthe Hoad (Xanthe.Hoad@cern.ch)" - print "This is Menu-aware monitoring version",self.version - print "You are",self.current_user - print "This version of MaM is in the process of being tested, please report any bugs to the authors." + print "Some Menu-aware Monitoring functions will not be available to you" + else: + self.connection_alias = alias + self.replica_db_connection = self.readonly_db_connection def __connect_to_oracle__(self,alias,database_username,database_password,database_name,database_directory): @@ -89,45 +92,67 @@ class MenuAwareMonitoringStandalone: # else if we are not else: - # info for user print "We are now connecting to the Oracle database" - - if alias.upper() is "CUSTOM_DB": + + if alias is "CUSTOM_DB": print "Connecting to database",database_name,"with provided username, password and directory" - print "MAM may not work as expected while using a custom DB" + print "Caution: MaM may not work as expected while using a custom DB" self.oi.connect_to_oracle(database_username,database_password,database_name,database_directory) self.connected_to_oracle = True - elif alias.upper() in ("TRIGGERDB","TRIGGERDBREPR","TRIGGERDBR2MAM"): - # try catch - try: - - # get the connection from authentication.xml - connectionSvc = MenuAwareMonitoringStandalone._getConnectionServicesForAlias(alias)[0] - print "Connection Service %s" % connectionSvc - user,pw = MenuAwareMonitoringStandalone._readAuthentication(alias)[connectionSvc] - server = connectionSvc.split('/')[2] - if alias is "TRIGGERDBREPR": - directory = "ATLAS_CONF_TRIGGER_REPR" + elif alias in ("TRIGGERDB","TRIGGERDBREPR","TRIGGERDBREPR_R","TRIGGERDBR2MAM"): + + # get the connection from authentication.xml + connectionSvcs = self._getConnectionServicesForAlias(alias) + # print "Connection Services %s" % 
connectionSvcs + for connectionSvc in connectionSvcs: + print "Trying connection service",connectionSvc + if 'oracle' in connectionSvc: + try: + oracle_server = connectionSvc.split('/')[2] + user,pw = self._readAuthentication(alias)[connectionSvc] + if alias in ["TRIGGERDBREPR","TRIGGERDBREPR_R"]: + directory = "ATLAS_CONF_TRIGGER_REPR" + else: + directory = "ATLAS_CONF_TRIGGER_RUN2" + print "User %s, server %s, directory %s" % (user,oracle_server,directory) + self.oi.connect_to_oracle(user,pw,oracle_server,directory) + # flag to record that we have connected + self.connected_to_oracle = True + if alias in ["TRIGGERDB","TRIGGERDBREPR_R"]: + self.readonly_db_connection = True + break + except: + print "Direct Oracle connection not possible, trying Frontier if available..." + self.connected_to_oracle = False + elif 'frontier' in connectionSvc: + try: + frontier_server = os.getenv('FRONTIER_SERVER',None) + if alias in ["TRIGGERDBREPR","TRIGGERDBREPR_R"]: + directory = "ATLAS_CONF_TRIGGER_REPR" + else: + directory = "ATLAS_CONF_TRIGGER_RUN2" + schema = directory + print "Frontier Server %s, schema %s" % (frontier_server,schema) + # connect to oracle + self.oi.connect_to_oracle_via_frontier(frontier_server,schema,directory) + # flag to record that we have connected + self.connected_to_oracle = True + if alias in ["TRIGGERDB","TRIGGERDBREPR_R"]: + self.readonly_db_connection = True + break + except: + print "Oracle connection via Frontier not possible." + self.connected_to_oracle = False else: - directory = "ATLAS_CONF_TRIGGER_RUN2" - print "User %s, server %s, directory %s" % (user,server,directory) - # connect to oracle - self.oi.connect_to_oracle(user,pw,server,directory) - # flag to record that we have connected - self.connected_to_oracle = True - if alias is "TRIGGERDB": - self.replica_db_connection = True - - except : - # info for user - print "Error while connecting to Oracle database." 
- self.connected_to_oracle = False + print "Error while connecting to Oracle database, no connections found" + self.connected_to_oracle = False else: - print "Error while connecting to Oracle database: Unrecognised database alias",alias + print "Error while connecting to Oracle database: Unrecognised database alias",alias self.connected_to_oracle = False + def __disconnect_from_oracle__(self): "Disconnect from the Oracle server." @@ -140,10 +165,10 @@ class MenuAwareMonitoringStandalone: # disconnect from oracle if self.oi.disconnect_from_oracle(): #returns True if disconnection is successful self.connected_to_oracle = False - self.replica_db_connection = False + self.readonly_db_connection = False + - @staticmethod - def _getConnectionServicesForAlias(alias): + def _getConnectionServicesForAlias(self,alias): connectionServices = None # list of services if alias is "TRIGGERDBR2MAM": @@ -151,8 +176,12 @@ class MenuAwareMonitoringStandalone: elif alias is "TRIGGERDBREPR": DBLOOKUP_PATH = '/afs/cern.ch/user/a/attrgcnf/.dbauth/menuexperts' else: + # TRIGGERDBREPR_R uses this path DBLOOKUP_PATH = 'CORAL_DBLOOKUP_PATH' - dblookupfilename = MenuAwareMonitoringStandalone._getFileLocalOrPath('dblookup.xml',DBLOOKUP_PATH) + if alias is "TRIGGERDBREPR_R": + # _R means we access the regular auth files instead of the ones with read/write access, need to change the alias back to TRIGGERDBREPR so that we look up the right details in the auth files + alias = "TRIGGERDBREPR" + dblookupfilename = self._getFileLocalOrPath('dblookup.xml',DBLOOKUP_PATH) if dblookupfilename == None: return None doc = minidom.parse(dblookupfilename) @@ -166,12 +195,12 @@ class MenuAwareMonitoringStandalone: print("Trigger connection alias '%s' is not defined in %s" % (alias,dblookupfilename)) return connectionServices - @staticmethod - def _readAuthentication(alias): + + def _readAuthentication(self,alias): """ read authentication.xml, first from local directory, then from all paths specified in 
CORAL_AUTH_PATH returns dictionary d with d[connection] -> (user,pw) """ - + authDict = {} if alias is "TRIGGERDBR2MAM": AUTH_PATH = 'MAM_CORAL_AUTH_PATH' @@ -179,9 +208,9 @@ class MenuAwareMonitoringStandalone: AUTH_PATH = '/afs/cern.ch/user/a/attrgcnf/.dbauth/menuexperts' else: AUTH_PATH = 'CORAL_AUTH_PATH' - dbauthfilename = MenuAwareMonitoringStandalone._getFileLocalOrPath('authentication.xml',AUTH_PATH) + dbauthfilename = self._getFileLocalOrPath('authentication.xml',AUTH_PATH) if dbauthfilename == None: return authDict - + doc = minidom.parse(dbauthfilename) for cn in doc.getElementsByTagName('connection'): user = "" @@ -193,9 +222,9 @@ class MenuAwareMonitoringStandalone: authDict[cn.attributes['name'].value] = (user,pw) doc.unlink() return authDict - - @staticmethod - def FindFile( filename, pathlist, access ): + + + def FindFile(self,filename, pathlist, access ): """Find <filename> with rights <access> through <pathlist>.""" # special case for those filenames that already contain a path @@ -210,9 +239,9 @@ class MenuAwareMonitoringStandalone: return f return None - - @staticmethod - def _getFileLocalOrPath(filename, pathenv): + + + def _getFileLocalOrPath(self,filename, pathenv): """looks for filename in local directory and then in all paths specified in environment variable 'pathenv' returns path/filename if existing, otherwise None """ @@ -221,7 +250,7 @@ class MenuAwareMonitoringStandalone: return filename pathlist = os.getenv(pathenv,'').split(os.pathsep) - resolvedfilename = MenuAwareMonitoringStandalone.FindFile(filename, pathlist, os.R_OK) + resolvedfilename = self.FindFile(filename, pathlist, os.R_OK) if resolvedfilename: return resolvedfilename else: @@ -251,7 +280,7 @@ class MenuAwareMonitoringStandalone: # if the input is not a string, make it one if type(smck_config) is not str: - + # use json to turn the config into a string smck_config_json = json.dumps(smck_config, ensure_ascii=True, sort_keys=True) @@ -288,7 +317,7 @@ class 
MenuAwareMonitoringStandalone: # warn user that their input is too long, and give them the change to re-enter a comment print "Your comment was too long, (it was",len(user_input),"characters long). Please enter a shorter comment." - + # get user input user_input = raw_input("comment: ") @@ -304,7 +333,7 @@ class MenuAwareMonitoringStandalone: def __ask_for_processing_step__(self): """If running interactively, ask user for the processing step(s) an upload will be valid for. 'ALL' is the default.""" - + # is this session interactive? If not, return "ALL" if self.__is_session_interactive__(): @@ -357,7 +386,7 @@ class MenuAwareMonitoringStandalone: def __ask_for_processing_stream__(self): """If running interactively, ask user for the processing stream(s) an upload will be valid for. 'ALL' is the default.""" - + # is this session interactive? If not, return "ALL" if self.__is_session_interactive__(): @@ -414,23 +443,12 @@ class MenuAwareMonitoringStandalone: # # (if -i was passed as a command-line argument) if sys.argv.__contains__('athena.py'): return sys.argv.__contains__('-i') - else: + else: return True def link_smk_to_mck(self,input_smk="",input_mck_id="",comment=""): """Upload a link between an SMK and an MCK. - All SMK from the input SMK onwards will be linked to the input MCK, - unless another link to a larger SMK is uploaded. - ie. if link_smk_to_mck(71,123) - and link_smk_to_mck(72,456) - are both called, then: - SMK 123 to SMK 455 will link to MCK 71 - SMK 456 and greater will link to MCK 72 - As uploaded MCK have positive (non-zero) values, - linking an SMK to MCK=0 is intended to signify - that the default tool configurations should be used. - ie. no MCK should be applied for this SMK. An optional comment can be attached to the upload. If you upload a link for an SMK, and a link already exists for that SMK, then a new link will be created and made active. 
@@ -461,7 +479,7 @@ class MenuAwareMonitoringStandalone: print "Please enter an MCK_ID integer to be linked." # now get user input - input_str = raw_input("MCK_ID: ") + input_str = raw_input("MCK_ID: ") # try to convert the input str to int try: @@ -471,7 +489,7 @@ class MenuAwareMonitoringStandalone: # if this session is not interactive else: - + # if the inputs are not valid if (type(input_mck_id) is not int) or (type(input_smk) is not int): @@ -480,15 +498,15 @@ class MenuAwareMonitoringStandalone: print "Menu-aware monitoring: link_smk_to_mck(",input_smk,",",input_mck_id,",",comment,") inputs not valid. MCK and SMK must be integers." print "No MCK to SMK link created." return - + if not self.oi.check_if_smk_exists(input_smk): - print "SMK",input_smk,"does not exist in the trigger DB." - print "No MCK to SMK link created." + print "SMK",input_smk,"does not exist." + print "No MCK to SMK link created." return - + if (not self.oi.check_if_mck_id_exists(input_mck_id)) and (input_mck_id is not 0): print "MCK",input_mck_id,"does not exist in database." - print "No MCK to SMK link created." + print "No MCK to SMK link created." 
return # if we've got this far, then the input MCK and SMK should be valid @@ -519,20 +537,20 @@ class MenuAwareMonitoringStandalone: # if a comment has not been provided, then ask for one if comment == "": comment = self.__ask_for_comment__() - + # try to upload the link try: # info for user print "Now attempting to link MCK",input_mck_id,"to SMK",input_smk - + if not self.oi.check_if_smk_to_mck_link_exists(input_smk,input_mck_id): # link does not exist # upload the link self.oi.upload_mck_to_smk_link(input_mck_id,input_smk,creator,comment) - self.oi.activate_smk_mck_link(input_mck_id,input_smk) - + self.oi.activate_smk_mck_link(input_smk,input_mck_id) + # info for user print "MCK",input_mck_id,"has been linked to SMK",input_smk @@ -546,6 +564,7 @@ class MenuAwareMonitoringStandalone: print "An exception occurred:",sys.exc_info()[0],sys.exc_info()[1] print "Error in link upload." + def force_deactivate_all_links_for_smk(self,input_smk,GUI=False): """Allows the user to clear all links for an smk, so that a new link can be created""" @@ -562,7 +581,7 @@ class MenuAwareMonitoringStandalone: if GUI is False: print "Will force deactivate all links for SMK",input_smk,". Do you really want to do this?" user_input = raw_input("y/n: ") - + if user_input != 'y': print "Aborted." return @@ -571,9 +590,35 @@ class MenuAwareMonitoringStandalone: self.oi.deactivate_all_links_for_given_smk(input_smk) print "All links deactivated." + + def print_all_mck_to_smk_links(self,print_deactivated_links=False): + """Print MCK to SMK links. By default prints only active links. + print_all_mck_to_smk_links(True) to print all links.""" + + if self.connected_to_oracle == False: + print "You are not connected to the database, so this function is not available." 
+ return + + # get list of all mck_to_smk links + mck_to_smk_links = [] + mck_to_smk_links = self.oi.get_all_mck_to_smk_links() + + # reverse the list in place + mck_to_smk_links.reverse() + + # loop over the list in reverse (lowest smk to largest smk) + for link in mck_to_smk_links: + + # print the results + if link['ACTIVE'] == '1': + print "SMK",link['SMK'],"is linked to MCK",link['MCK'],"(ACTIVE LINK)" + elif print_deactivated_links: + print "SMK",link['SMK'],"was previously linked to MCK",link['MCK'],"(DEACTIVATED LINK)" + + def __get_smck_id_from_smck_identifier__(self,smck_identifier,print_output_here=""): "Input can either be an SMCK_ID or an SMCK_TOOL_PATCH_VERSION. Output will be the the correct SMCK_ID." - + if self.connected_to_oracle == False: print "You are not connected to the database, so this function is not available." return @@ -627,13 +672,14 @@ class MenuAwareMonitoringStandalone: # return smck_id return smck_id + def get_smck_info_from_db(self,smck_identifier,print_output_here=""): "Get an SMCK configuration from the Oracle database." - + if self.connected_to_oracle == False: print "You are not connected to the database, so this function is not available." return - + # check for empty print_output_here # if it is found, use self.print_output if print_output_here == "": @@ -647,7 +693,7 @@ class MenuAwareMonitoringStandalone: # return -1, as no smck_info can be obtained return -1 - + else: # get smck_info @@ -656,52 +702,70 @@ class MenuAwareMonitoringStandalone: # return this smck_info return smck_info - def check_compatibility_of_two_release_versions(version1,version2): - """ Check if two releases versions are compatible. + + def check_compatibility_of_two_release_versions(self,version1,version2): + """ Check if two releases versions are compatible. 
If they do, MAM will apply MCKs from either release in the other.""" - + project1 = "DummyProject" project2 = project1 - + if ( "-" in version1 ) and ( "-" in version2 ): project1 = version1.split("-")[0] version1 = version1.split("-")[1] project2 = version2.split("-")[0] version2 = version2.split("-")[1] - + if ( version1.startswith(version2) or version1.startswith(version2) ) and project1.upper() == project2.upper(): return True - + + def clone_mck_for_new_release(self,mck_id,project="",version=""): - + if self.connected_to_oracle == False: print "You are not connected to the database, so this function is not available." return - - if self.replica_db_connection == True: - print "You are connected to the replica database and your connection is read only, so this function is not available to you." + + if self.readonly_db_connection == True: + print "Your connection is read only, so this function is not available to you." return - + # check mck exists if not self.oi.check_if_mck_id_exists(mck_id): print "MCK",mck_id,"doesn't exist." 
return - + if not project or not version: print "Please give an AtlasProject and an AtlasVersion to assign to the cloned MCK" print "Usage: clone_mck_for_new_release(mck_id,project,version)" return - - # get the original mck info + + requested_release = project + "-" + version + + # get the original mck info mck_info = self.oi.read_mck_info_from_db(mck_id) smck_ids = self.oi.read_mck_links_from_db(mck_id) - - # alter it + + # check if mck already exists with the requested release + search_results = self.oi.check_if_mck_exists(smck_ids) + # first check if links were found + if search_results != -1: + # loop over possible matches + for row_mck_id in search_results: + # check if this is an exact match + if len(self.oi.read_mck_links_from_db(row_mck_id)) == len(smck_ids): + # if so, check the release + if self.oi.read_mck_info_from_db(row_mck_id)['MCK_ATHENA_VERSION'] == requested_release: + print "MCK",row_mck_id,"is an existing clone of MCK",mck_id,"valid in",requested_release + print "A new clone will not be created." + return + + # alter the mck info mck_info['MCK_COMMENT'] = "Clone of MCK " + str(mck_id) + " originally for " + mck_info['MCK_ATHENA_VERSION'] - mck_info['MCK_ATHENA_VERSION'] = project + "-" + version + mck_info['MCK_ATHENA_VERSION'] = requested_release del mck_info['MCK_CREATION_DATE'] del mck_info['MCK_ID'] - + # create the new mck, this modifies mck_info self.oi.upload_mck(mck_info) @@ -709,13 +773,12 @@ class MenuAwareMonitoringStandalone: # link this new mck to the original smck_ids for smck_id in smck_ids: - - # upload this link self.oi.upload_mck_smck_link(clone_mck_id,smck_id) - + print "Cloned MCK",mck_id,"to create MCK",int(clone_mck_id) print mck_info + def upload_mck(self,input_smck_list=[],comment="",print_output_here=""): """input_smck_list should be a list of SMCK, identified be either their SMCK_ID or SMCK_TOOL_PATCH_VERSION. An MCK will be uploaded, linking to these SMCK. 
@@ -724,11 +787,11 @@ class MenuAwareMonitoringStandalone: if self.connected_to_oracle == False: print "You are not connected to the database, so this function is not available." return - - if self.replica_db_connection == True: - print "You are connected to the replica database and your connection is read only, so this function is not available to you." + + if self.readonly_db_connection == True: + print "Your connection is read only, so this function is not available to you." return - + # check for empty print_output_here # if it is found, use self.print_output if print_output_here == "": @@ -748,19 +811,16 @@ class MenuAwareMonitoringStandalone: # loop over input for smck_identifier in input_smck_list: + if smck_identifier not in smck_ids: + smck_id = self.__get_smck_id_from_smck_identifier__(smck_identifier) - # get smck_id - smck_id = self.__get_smck_id_from_smck_identifier__(smck_identifier) - - # if an smck_id has not been found - if smck_id == -1: + # if an smck_id has not been found + if smck_id == -1: + if print_output_here: + print "Problem with requested SMCK",smck_identifier - # info for user - if print_output_here: - print "Problem with requested SMCK",smck_identifier - - # add this smck_id to the smck_ids list - smck_ids.append(smck_id) + # smck is real and not already in the list, so add it + smck_ids.append(smck_id) # check if all smck_id have been found @@ -776,20 +836,20 @@ class MenuAwareMonitoringStandalone: smck_release_dict = {} for smck_id in smck_ids: - smck_release = self.oi.read_smck_info_from_db(smck_id)['SMCK_ATHENA_VERSION'] + smck_release = self.oi.read_smck_info_from_db(smck_id)['SMCK_ATHENA_VERSION'] if smck_release in smck_release_dict: smck_release_dict[smck_release].append(smck_id) else: smck_release_dict[smck_release] = [smck_id] - + if len(smck_release_dict) > 1: - print "Your SMCK list contains SMCKs from different Athena releases." - print "MCKs must consist of SMCKs from the same release." 
- print "Please try again with a different SMCK list." + print "Your SMCK list contains SMCKs from different Athena releases." + print "MCKs must consist of SMCKs from the same release." + print "Please try again with a different SMCK list." print "The SMCKs you entered with their releases will be printed below." print smck_release_dict return - + # check that the mck does not already exist mck_id = self.oi.check_if_exact_mck_exists(smck_ids) @@ -804,7 +864,7 @@ class MenuAwareMonitoringStandalone: # if no comment is provided, then ask for one if comment=="": comment = self.__ask_for_comment__() - + # fill mck_info mck_info = {} mck_info['MCK_DEFAULT'] = 0 @@ -824,21 +884,114 @@ class MenuAwareMonitoringStandalone: # info for user if print_output_here: - print "This MCK has been uploaded. It is MCK",mck_id + print "This MCK has been uploaded. It is MCK",int(mck_id) + + + def upload_mck_with_specified_release(self,input_smck_list=[],athena_release="",comment="",print_output_here=""): + """input_smck_list should be a list of SMCK, identified be either their SMCK_ID or SMCK_TOOL_PATCH_VERSION. + An MCK will be uploaded, linking to these SMCK. + The release that will be assigned to this MCK must be specified. + Optional comment can be provided.""" + + if self.connected_to_oracle == False: + print "You are not connected to the database, so this function is not available." + return + + if self.readonly_db_connection == True: + print "Your connection is read only, so this function is not available to you." + return + + # check for empty print_output_here + # if it is found, use self.print_output + if print_output_here == "": + print_output_here = self.print_output + + if input_smck_list == []: + if print_output_here: + print "No list of SMCK has been provided." + print "No MCK upload is possible without a list of SMCKs." + return + + if athena_release == "": + if print_output_here: + print "No release has been specified." 
+ print "No MCK upload is possible without a specified release." + return + + # holder for smck_ids + smck_ids = [] + + # loop over input + for smck_identifier in input_smck_list: + + # get smck_id + smck_id = self.__get_smck_id_from_smck_identifier__(smck_identifier) + + # if an smck_id has not been found + if smck_id == -1: + + # info for user + if print_output_here: + print "Problem with requested SMCK",smck_identifier + + # add this smck_id to the smck_ids list + smck_ids.append(smck_id) + + + # check if all smck_id have been found + # TODO - CHECK THAT THE -1 CRITERIA IS CORRECT (ie do we need '-1' instead? I don't think so) + if smck_ids.__contains__(-1): + + # one or more smck are missing. Abort mck upload + if print_output_here: + print "One or more of the SMCK requested are missing. Aborting MCK upload." + return + + # check that the mck does not already exist + mck_id = self.oi.check_if_exact_mck_exists(smck_ids) + + # if mck does already exist, then say so and exit + if mck_id != -1: + + # info for user + if print_output_here: + print "This MCK already exists. It is MCK",int(mck_id) + return + + # if no comment is provided, then ask for one + if comment=="": + comment = self.__ask_for_comment__() + + # fill mck_info + mck_info = {} + mck_info['MCK_DEFAULT'] = 0 + mck_info['MCK_ATHENA_VERSION'] = athena_release + mck_info['MCK_CREATOR'] = self.current_user + mck_info['MCK_COMMENT'] = comment + + # upload mck_info + mck_id = self.oi.upload_mck(mck_info) + + # link this mck to the smck_ids + for smck_id in smck_ids: + + # upload this link + self.oi.upload_mck_smck_link(mck_id,smck_id) + + # info for user + if print_output_here: + print "This MCK has been uploaded. It is MCK",int(mck_id) def load_local_config_from_json(self,input_json_filename="mam_configs.json"): """Load all trigger monitoring tool configurations from an input json file. Note: this does not apply the json config to running trigger-monitoring tools. 
- It only loads the json config into <ThisVariable>.local_global_info + It only loads the json config into <ThisVariable>.local_global_info to allow for upload to the MaM database, or testing. Existing <ThisVariable>.local_global_info will be overwritten. - Once loaded, this json config can be uploaded to the MaM database by calling - <ThisVariable>.upload_default() if this is a default config, or - <ThisVariable>.upload_all_local_changes_as_smck() if this is a patch config. - However to upload to the database from a json file, you probably want to instead use + However to upload to the database from a json file, you probably want to instead use <ThisVariable>.upload_config_from_json(input_json_filename)""" - + # create a file-like-object to read the json from input_file = open( input_json_filename , "r" ) @@ -852,40 +1005,33 @@ class MenuAwareMonitoringStandalone: input_file.close() - def upload_config_from_json(self,input_json_filename="mam_configs.json",print_output_here=""): - """Load all trigger monitoring tool configurations from an input json file, - and upload these configurations to the database. - If you only want to load configurations from a json file, - but not immediately upload to the database, then consider instead calling - <ThisVariable>.load_local_config_from_json(input_json_filename)""" + def upload_config_from_json(self,input_json_filename="mam_configs.json"): + """Load all trigger monitoring tool configurations from an input json file, + and upload these configurations to the database.""" if self.connected_to_oracle == False: print "You are not connected to the database, so this function is not available." return - - if self.replica_db_connection == True: - print "You are connected to the replica database and your connection is read only, so this function is not available to you." + + if self.readonly_db_connection == True: + print "Your connection is read only, so this function is not available to you." 
return - - # check for empty print_output_here - # if it is found, use self.print_output - if print_output_here == "": - print_output_here = self.print_output # first we load the json file self.load_local_config_from_json(input_json_filename) # check that there are SMCK in this upload if len(self.local_global_info['MONITORING_TOOL_DICT']) == 0: - + # info for user print "The file",input_json_filename,"contains no tool configurations to be uploaded." return # then we extract whether this is a default configuration or not - default = self.local_global_info['MCK']['MCK_DEFAULT'] + default = int(self.local_global_info['MCK']['MCK_DEFAULT']) # and we extract the comment, athena version, stream, step, and user + """ comment = self.local_global_info['MCK']['MCK_COMMENT'] athena_version = self.local_global_info['MCK']['MCK_ATHENA_VERSION'] user = self.local_global_info['MCK']['MCK_CREATOR'] @@ -893,19 +1039,47 @@ class MenuAwareMonitoringStandalone: processing_stream = value['SMCK_PROCESSING_STREAM'] processing_step = value['SMCK_PROCESSING_STEP'] break + """ # if this is a default config, then use upload_default if default == 1: - self.upload_default(comment,athena_version,user,print_output_here) - # else we use upload_all_local_changes_as_smck + print "Defaults should not be uploaded to the DB." 
+ #self.upload_default(comment,athena_version,user,print_output_here) else: - self.upload_all_local_changes_as_smck(processing_step,processing_stream,comment,athena_version,user,print_output_here) + # depreciated: + #self.upload_all_local_changes_as_smck(processing_step,processing_stream,comment,athena_version,user,print_output_here) + + # upload this self.local_global_info to the database, and get the new mck_id and smck_ids + mck_id, smck_ids = self.oi.upload_mck_and_smck(self.local_global_info) + + # print smck info + print "The following SMCK have been created or already exist:" + print "" + + for smck_id in smck_ids: + + # get smck_info + tool_value = self.get_smck_info_from_db(smck_id) + + # print info for this smck + print "SMCK_ID =",tool_value['SMCK_ID'] + print "SMCK_TOOL_PATCH_VERSION =",tool_value['SMCK_TOOL_PATCH_VERSION'] + + # also print mck info + print "" + print "An MCK has been created or already exists, linking to the above SMCK:" + print "MCK_ID =",mck_id + print "" + + # if we are running silently, still return the new_mck_id and new_smck_ids + # (ie. in case this function has been called by another function, which might like to know the new_mck_id and new_smck_ids) + return mck_id, smck_ids def get_default_mck_id_from_db(self,input_athena_version=""): """Get the MCK number (MCK_ID) of the default for this Athena version. If input_athena_version=='', the current Athena version is used.""" - + if self.connected_to_oracle == False: print "You are not connected to the database, so this function is not available." return @@ -918,81 +1092,53 @@ class MenuAwareMonitoringStandalone: return self.oi.read_default_mck_id_from_db(input_athena_version) - def upload_default(self,comment="",athena_version="",user="",print_output_here=""): - "Upload all current trigger-monitoring tool configurations as default for this Athena version." 
+ def get_default_from_db(self,input_athena_version="",print_output_here=""): + """Prints default MCK number (MCK_ID) for an Athena version. + If no Athena version is specified, the current Athena version being run in is used. + All default information is made available in the <ThisVariable>.default_global_info dictionary.""" + + if self.connected_to_oracle == False: + print "You are not connected to the database, so this function is not available." + return # check for empty print_output_here # if it is found, use self.print_output if print_output_here == "": print_output_here = self.print_output - # check for empty athena_version - # if it is found, use self.current_athena_version - if athena_version == "": - athena_version=self.current_athena_version + # info for user + if print_output_here: + print "Attempting to get default tool configuration from database" - # check for empty user - # if it is found, use self.current_user - if user == "": - user=self.current_user + # if no input Athena version is provided, then use the current version + if input_athena_version == "": + input_athena_version = self.current_athena_version # search for default mck - default_mck = self.get_default_mck_id_from_db(athena_version) + default_mck = self.get_default_mck_id_from_db(input_athena_version) - # if it already exists + # if a valid default mck exists if default_mck >= 0: + # info for user if print_output_here: - print "There already exists a default mck for this Athena version:" - print "Athena version: "+athena_version - print "Default MCK:",default_mck + print "Default MCK for Athena version "+input_athena_version+" is",default_mck + + # fill self.default_global_info + self.default_global_info = self.get_global_info_from_db(default_mck) + # if there is no default for this Athena version else: - # if no comment is provided, then ask for one - if comment=="": - comment = self.__ask_for_comment__() + # info for user + if print_output_here: + print "No default for Athena version 
"+self.current_athena_version+" has been uploaded" + print "If you are not running with any local changes to the default, then consider running the command \"<ThisVariable>.upload_default()\"" - # fill mck_info - self.local_global_info['MCK'] = {} - self.local_global_info['MCK']['MCK_DEFAULT'] = 1 - self.local_global_info['MCK']['MCK_ATHENA_VERSION'] = athena_version - self.local_global_info['MCK']['MCK_CREATOR'] = user - self.local_global_info['MCK']['MCK_COMMENT'] = comment - - # update self.local - self.__update_local_pointer__() - - # for each local tool - for tool, smck_info in self.local_global_info['MONITORING_TOOL_DICT'].iteritems(): - - # fill smck_info - smck_info['SMCK_PROCESSING_STEP'] = "ALL" - smck_info['SMCK_PROCESSING_STREAM'] = "ALL" - smck_info['SMCK_DEFAULT'] = 1 - smck_info['SMCK_CREATOR'] = user - smck_info['SMCK_COMMENT'] = comment - - # upload this self.local_global_info to the database, and get the new mck_id and smck_ids - mck_id, smck_ids = self.oi.upload_mck_and_smck(self.local_global_info) - - # info for user - if print_output_here: - print "The default for this Athena version ("+athena_version+") has been uploaded" - print "It has been given the MCK",mck_id - print "" - print "The following tools have had their current configurations uploaded as defaults:" - print "" - for smck_id in smck_ids: - smck_info = self.oi.read_smck_info_from_db(smck_id) - print "Tool:",smck_info['SMCK_TOOL_TYPE'] - print "SMCK:",smck_id - print "SMCK tool patch version:",smck_info['SMCK_TOOL_PATCH_VERSION'] - print "" def get_global_info_from_db(self,mck_id): "For an input MCK number (MCK_ID), get all related MCK and SMCK info, and return it as a dictionary." - + if self.connected_to_oracle == False: print "You are not connected to the database, so this function is not available." 
return @@ -1011,7 +1157,7 @@ class MenuAwareMonitoringStandalone: # get smck_info smck_info = self.oi.read_smck_info_from_db(smck_id) - # generate monitoring_tool_dict key, a combination of the tool name, + # generate monitoring_tool_dict key, a combination of the tool name, # the processing step (if not ALL), # and the processing stream (if not ALL) smck_key = smck_info['SMCK_TOOL_TYPE'] @@ -1027,133 +1173,106 @@ class MenuAwareMonitoringStandalone: return global_info - def get_default_from_db(self,input_athena_version="",print_output_here=""): - """Prints default MCK number (MCK_ID) for an Athena version. - If no Athena version is specified, the current Athena version being run in is used. - All default information is made available in the <ThisVariable>.default_global_info dictionary.""" - - if self.connected_to_oracle == False: - print "You are not connected to the database, so this function is not available." - return + def upload_default(self,comment="",athena_version="",user="",print_output_here=""): + "Upload all current trigger-monitoring tool configurations as default for this Athena version." + + print "Defaults should not be uploaded to the DB." 
# check for empty print_output_here # if it is found, use self.print_output if print_output_here == "": print_output_here = self.print_output - # info for user - if print_output_here: - print "Attempting to get default tool configuration from database" + # check for empty athena_version + # if it is found, use self.current_athena_version + if athena_version == "": + athena_version=self.current_athena_version - # if no input Athena version is provided, then use the current version - if input_athena_version == "": - input_athena_version = self.current_athena_version + # check for empty user + # if it is found, use self.current_user + if user == "": + user=self.current_user # search for default mck - default_mck = self.get_default_mck_id_from_db(input_athena_version) + default_mck = self.get_default_mck_id_from_db(athena_version) - # if a valid default mck exists + # if it already exists if default_mck >= 0: - # info for user if print_output_here: - print "Default mck for Athena version "+input_athena_version+" is",default_mck - - # fill self.default_global_info - self.default_global_info = self.get_global_info_from_db(default_mck) + print "There already exists a default MCK for this Athena version:" + print "Athena version: "+athena_version + print "Default MCK:",default_mck - # if there is no default for this Athena version else: - # info for user - if print_output_here: - print "No default for Athena version "+self.current_athena_version+" has been uploaded" - print "If you are not running with any local changes to the default, then consider running the command \"<ThisVariable>.upload_default()\"" - - - def __calculate_diff__(self,dict1={},dict2={},diff_all=False): - """Calculate and return a diff between two inputs. - If diff_all==True, all items that are not identical in both inputs will be returned." 
- If diff_all==False (default), only items that are in both inputs, and different, will be returned.""" - - # only return keys that are in both dictionaries - # for these keys, return two dictionaries, one of all the values in dict1, one of all the values in dict2 - - # dictionaries for filling and returning - return_dict1 = {} - return_dict2 = {} - - # loop over the first dictionary - for key, value1 in dict1.iteritems(): - - # if this key is in the second dictionary - if dict2.__contains__(key): - - # get the value in the second dictionary - value2 = dict2[key] - - # first check if the values are equal - # (if they are equal, then we don't want to put anything for this key into the return dictionaries) - if str(value1) != str(value2): - - # are value1 and value2 dictionaries? if so, recursively call __calculate_diff__ - if type(value1) is dict and type(value2) is dict: - - # recursively call __calculate_diff__ and fill return dictionaries - return_dict1_temp, return_dict2_temp = self.__calculate_diff__(value1,value2,diff_all) + # if no comment is provided, then ask for one + if comment=="": + comment = self.__ask_for_comment__() - # check that the output is not identical - # this catches the case when diff_all==False - # where str(value1) != str(value2) because at least one of the dicts contains a key that the other doesn't - # but all the matching keys have identical values - # which causes the return_dicts to be equal (blank) - if return_dict1_temp != return_dict2_temp: - - # fill return dictionaries - return_dict1[key] = return_dict1_temp - return_dict2[key] = return_dict2_temp + # fill mck_info + self.local_global_info['MCK'] = {} + self.local_global_info['MCK']['MCK_DEFAULT'] = 1 + self.local_global_info['MCK']['MCK_ATHENA_VERSION'] = athena_version + self.local_global_info['MCK']['MCK_CREATOR'] = user + self.local_global_info['MCK']['MCK_COMMENT'] = comment - else: + # update self.local + self.__update_local_pointer__() - # fill return dictionaries with the 
different values - return_dict1[key] = value1 - return_dict2[key] = value2 + # for each local tool + for tool, smck_info in self.local_global_info['MONITORING_TOOL_DICT'].iteritems(): - # else if this key is not in the second dictionary - else: + # fill smck_info + smck_info['SMCK_PROCESSING_STEP'] = "ALL" + smck_info['SMCK_PROCESSING_STREAM'] = "ALL" + smck_info['SMCK_DEFAULT'] = 1 + smck_info['SMCK_CREATOR'] = user + smck_info['SMCK_COMMENT'] = comment - #if we are adding all differences to the return dicts - if diff_all: + # upload this self.local_global_info to the database, and get the new mck_id and smck_ids + mck_id, smck_ids = self.oi.upload_mck_and_smck(self.local_global_info) - # then add this key and value to the first return dict - return_dict1[key] = value1 + # info for user + if print_output_here: + print "The default for this Athena version ("+athena_version+") has been uploaded" + print "It has been given the MCK",mck_id + print "" + print "The following tools have had their current configurations uploaded as defaults:" + print "" + for smck_id in smck_ids: + smck_info = self.oi.read_smck_info_from_db(smck_id) + print "Tool:",smck_info['SMCK_TOOL_TYPE'] + print "SMCK:",smck_id + print "SMCK tool patch version:",smck_info['SMCK_TOOL_PATCH_VERSION'] + print "" - #if we are adding all differences to the return dicts - if diff_all: - # loop over the second dictionary - for key, value2 in dict2.iteritems(): + def get_default_from_json(self,input_json_filename=""): + """Get a default configuration from a json file and store it as self.default_global_info""" - # if this key is not in the first dictionary - if not dict1.__contains__(key): + # create a file-like-object to read the json from + input_file = open( input_json_filename , "r" ) - # then add this key and value to the second return dict - return_dict2[key] = value2 + # json decode the local global info, and pass it to self.local_global_info, converting unicode to str + self.default_global_info = 
self.oi.__unicode_to_str__( json.load(input_file) ) - # return the dictionaries - return return_dict1, return_dict2 + # close the input file + input_file.close() def upload_all_local_changes_as_smck(self,processing_step="",processing_stream="",comment="",athena_version="",user="",print_output_here=""): """Upload all local configuration changes wrt the default. Optional processing step, stream, and comment can be provided.""" - + + print "This function is depreciated, please use mam.make_patch_json and mam.ms.upload_config_from_json instead." + if self.connected_to_oracle == False: print "You are not connected to the database, so this function is not available." return - - if self.replica_db_connection == True: - print "You are connected to the replica database and your connection is read only, so this function is not available to you." + + if self.readonly_db_connection == True: + print "Your connection is read only, so this function is not available to you." return # check for empty print_output_here @@ -1183,9 +1302,6 @@ class MenuAwareMonitoringStandalone: print "If you are not running with any local changes to the default, then consider running the command \"<ThisVariable>.upload_default()\"" return - # get all local tool info - #self.get_current_local_info() - # if no tools are running locally if len(self.local_global_info['MONITORING_TOOL_DICT']) == 0: @@ -1198,12 +1314,10 @@ class MenuAwareMonitoringStandalone: return # get default from database - # (should already have been done during __init__, - # but in case the default has only been uploaded in this session then we check again) self.get_default_from_db(athena_version) # create diff of global_info - # we want diffed_global_info2, + # we want diffed_global_info2, # which is the 'patch' to apply to the default to get the current local configuration diffed_global_info1, diffed_global_info2 = self.__calculate_diff__(self.default_global_info,self.local_global_info,False) @@ -1234,11 +1348,11 @@ class 
MenuAwareMonitoringStandalone: diffed_global_info2['MCK']['MCK_CREATOR'] = user diffed_global_info2['MCK']['MCK_COMMENT'] = comment - # in case we want to remove any diffed_global_info2['MONITORING_TOOL_DICT'] items, we must make a list of the keys, + # in case we want to remove any diffed_global_info2['MONITORING_TOOL_DICT'] items, we must make a list of the keys, # and then delete these keys after we have finished iterating over diffed_global_info2['MONITORING_TOOL_DICT'] # It is not possible to delete elements while iterating over a list or dict keys_to_delete = [] - + # fill extra smck_info for all tools for tool_key, tool_value in diffed_global_info2['MONITORING_TOOL_DICT'].iteritems(): @@ -1287,14 +1401,11 @@ class MenuAwareMonitoringStandalone: # info for user if print_output_here: - + # print smck info - print "The following SMCK have been created:" + print "The following SMCK have been created or already exist:" print "" - # loop over filled dictionary diffed_global_info2['MONITORING_TOOL_DICT'] as it contains all the new_smck_ids but also the new smck_tool_patch_version - # Edit: no, it seems not to. Where did the pointers break? - #for tool_key, tool_value in diffed_global_info2['MONITORING_TOOL_DICT'].iteritems(): for smck_id in new_smck_ids: # get smck_info @@ -1306,7 +1417,7 @@ class MenuAwareMonitoringStandalone: # also print mck info print "" - print "For convenience, an MCK has also been created automatically, linking to the above SMCK." + print "An MCK has been created or already exists, linking to the above SMCK:" print "MCK_ID =",new_mck_id print "" @@ -1315,20 +1426,21 @@ class MenuAwareMonitoringStandalone: else: return new_mck_id, new_smck_ids + def search(self,flag1="",input1="",print_output_here=""): """Search the Oracle database for something. input1 is is what is to be searched for. 
flag1 specifies what kind of input input1 is.""" - + if self.connected_to_oracle == False: print "You are not connected to the database, so this function is not available." return - + # check for empty print_output_here # if it is found, use self.print_output if print_output_here == "": print_output_here = self.print_output - + # search for an input # there should be an input and a flag # the flag can be any one of the columns found in the SQL schema @@ -1336,13 +1448,13 @@ class MenuAwareMonitoringStandalone: # if the flag matches a column, then then that table is searched for rows where input_value=flag_column # such rows are then converted into dictionaries, one dict per row # these dicts are then printed using self.__print_one_dict__(row_dict,"",row_name) - + # get SQL table and column names database_column_list = self.oi.get_db_tables_and_columns() - + # if the input or flag are missing if flag1 == "": - + #info for user if print_output_here: print "search takes two arguments: flag, input." @@ -1357,153 +1469,157 @@ class MenuAwareMonitoringStandalone: print "'SMCK_SLICE_TYPE','Muon'" print "" print "Recognised flags are:" - + # print available columns for row in database_column_list: - + # print this column name, with a preceding - print row['COLUMN_NAME'] - + # nice spacing for the user print "" print "You can leave the input blank to print all entries for the flag, but you must enter a flag." 
- + # if we do not have the flag, then we can not search, and so must exit return - + + if flag1.upper() == 'SMK_ID': + # the DB uses Super Master Table ID instead of Super Master Key, we accept either + flag1 = 'SMT_ID' + # check if the flag is a valid column # flag to indicate that we've matched the flag to a column column_match = False - + # loop over columns for row in database_column_list: - + # check if this column matches if flag1.upper() == row['COLUMN_NAME']: - + # then extract the table name and column name table_name = row['TABLE_NAME'] column_name = row['COLUMN_NAME'] - + # and set column_match and break out of this loop column_match = True break - + # check that we have found the column if not column_match: - + #info for user if print_output_here: print "The flag '"+flag1+"' has not been recognised as a valid flag. Recognised flags are:" - + # print available columns for row in database_column_list: - + # print this column name, with a preceding - print "-"+row['COLUMN_NAME'] - + # no valid flag (column) means we can not search, and so must exit return - + # if we've got to here then we should have a value input1, and a table_name and column_name # if we want to check for naughty input (SQL injection) in input1, then here is the place to do it - + # we can't search for CLOB objects, ie. smck_config # so if the user is attempting to search for smck_config, then warn them # the input they have provided could be hashed, and the hash could be searched for if flag1 == "smck_config": - + # info for the user if print_output_here: - + # explain problem print "" print "You have attempted to search for an SMCK_CONFIG." print "Unfortunately, due to limitations in Oracle, this is not possible." print "This is because the SMCK_CONFIG is stored as a CLOB, and in Oracle it is not possible to perform SQL query comparisons to CLOB objects, (WHERE SMCK_CONFIG = user_input)." 
- + # explain the alternative print "" print "To allow for SMCK_CONFIG comparisons, each config is hashed (using sha512) and hashes are compared." print "Your input will now be hashed, and that hash will be searched for." print "As this may not be exactly what you intended, do take care when interpreting these search results." print "" - + # convert the flag flag1 = 'smck_config_hash' - + # convert the input to a hash input1 = self.__get_config_hash__(input1) - - + + # now lets search search_results_list = self.oi.column_search(input1,table_name,column_name) - + # and print our results in a neat manner if print_output_here: - + # remind the user what was searched for print "" print input1,"has been searched for in the column",column_name,"of table",table_name,"in the menu-aware monitoring Oracle database." print "" - + # if no results have been found if len(search_results_list) == 0: - + # let the user know print "No results have been found." print "" return - + # else if results have been found print "The following",len(search_results_list),"results have been found:" - + # loop over the search results for n, row_dict in enumerate(search_results_list): - + # create a name for the row # this is just "Result_n" where n is the row number row_name = "Result_"+str(n) - + # recursively print the dictionary print "" print row_name+":" self.__print_one_dict__(row_dict," ",row_name) - + # added bonus for the user # if this is an mck table, then we shall find and print out the list of smck that this mck links to if table_name == "MCK_TABLE": - + # get the smck_ids smck_ids = self.oi.read_mck_links_from_db(row_dict['MCK_ID']) - + # info for user print "" if smck_ids is -1: print row_name,"is an MCK which links to no SMCKs. This MCK is empty." 
else: print row_name,"is an MCK, and links to the following SMCK:" - + # first print the list of SMCK_ID print "SMCK_ID list =",smck_ids - + # find the longest length of each of the three strings we want to print below id_ljust = 0 version_ljust = 0 slice_ljust = 0 - + # temporary dict to hold smck_info so that we don't have to get them a second time smck_info_dict = {} - + # for each smck_id for smck_id in smck_ids: - + # get the smck_info smck_info = self.oi.read_smck_info_from_db(smck_id) - + # add it to smck_info_dict smck_info_dict[smck_id] = smck_info - + # find the longest strings if len(str(smck_id)) > id_ljust: id_ljust = len(str(smck_id)) @@ -1511,43 +1627,116 @@ class MenuAwareMonitoringStandalone: version_ljust = len(str(smck_info['SMCK_TOOL_PATCH_VERSION'])) if len(str(smck_info['SMCK_SLICE_TYPE'])) > slice_ljust: slice_ljust = len(str(smck_info['SMCK_SLICE_TYPE'])) - + # now we print stuff # for each smck_id for smck_id in smck_ids: - + # get the smck_info from the above dictionary smck_info = smck_info_dict[smck_id] - + # print some formatted info on this smck print "[ SMCK_ID = "+str(smck_id).ljust(id_ljust)+" , SMCK_TOOL_PATCH_VERSION = "+str(smck_info['SMCK_TOOL_PATCH_VERSION']).ljust(version_ljust)+" , SMCK_SLICE_TYPE = "+str(smck_info['SMCK_SLICE_TYPE']).ljust(slice_ljust)+" ]" - + # some nice spacing print "" - else: - # return the search results - return search_results_list - + # return the search results + return search_results_list + + + def __calculate_diff__(self,dict1={},dict2={},diff_all=False): + """Calculate and return a diff between two inputs. + If diff_all==True, all items that are not identical in both inputs will be returned." 
+ If diff_all==False (default), only items that are in both inputs, and different, will be returned.""" + + # only return keys that are in both dictionaries + # for these keys, return two dictionaries, one of all the values in dict1, one of all the values in dict2 + + # dictionaries for filling and returning + return_dict1 = {} + return_dict2 = {} + + # loop over the first dictionary + for key, value1 in dict1.iteritems(): + + # if this key is in the second dictionary + if dict2.__contains__(key): + + # get the value in the second dictionary + value2 = dict2[key] + + # first check if the values are equal + # (if they are equal, then we don't want to put anything for this key into the return dictionaries) + if str(value1) != str(value2): + + # are value1 and value2 dictionaries? if so, recursively call __calculate_diff__ + if type(value1) is dict and type(value2) is dict: + + # recursively call __calculate_diff__ and fill return dictionaries + return_dict1_temp, return_dict2_temp = self.__calculate_diff__(value1,value2,diff_all) + + # check that the output is not identical + # this catches the case when diff_all==False + # where str(value1) != str(value2) because at least one of the dicts contains a key that the other doesn't + # but all the matching keys have identical values + # which causes the return_dicts to be equal (blank) + if return_dict1_temp != return_dict2_temp: + + # fill return dictionaries + return_dict1[key] = return_dict1_temp + return_dict2[key] = return_dict2_temp + + else: + + # fill return dictionaries with the different values + return_dict1[key] = value1 + return_dict2[key] = value2 + + # else if this key is not in the second dictionary + else: + + #if we are adding all differences to the return dicts + if diff_all: + + # then add this key and value to the first return dict + return_dict1[key] = value1 + + #if we are adding all differences to the return dicts + if diff_all: + + # loop over the second dictionary + for key, value2 in 
dict2.iteritems(): + + # if this key is not in the first dictionary + if not dict1.__contains__(key): + + # then add this key and value to the second return dict + return_dict2[key] = value2 + + # return the dictionaries + return return_dict1, return_dict2 + + def __print_one_dict__(self,dict1={},tab_space="",name1=""): "Print a dictionary in a neatly formatted manner." - + # loop over the keys (and values) in dict1 for key, value1 in dict1.iteritems(): - + # if the value is a dict if type(value1) is dict: - + # first print this dict key print tab_space+name1+"['"+key+"']:" - + # recursively call this function # add some space to tab_space, to indent the sub-dictionary self.__print_one_dict__(value1," "+tab_space,name1) - + # if the value is a list elif type(value1) is list: - + # print the items nicely (no unicode u' features) print tab_space+name1+"['"+key+"'] = [", for n, item in enumerate(value1): @@ -1558,94 +1747,89 @@ class MenuAwareMonitoringStandalone: if n != len(value1)-1: print ",", print "]" - + # else if this value is not a dict or a list, then we should print else: print tab_space+name1+"['"+key+"'] =",value1 - def print_all_mck_to_smk_links(self,print_deactivated_links=False): - """Print MCK to SMK links. By default prints only active links. - print_all_mck_to_smk_links(True) to print all links.""" - - if self.connected_to_oracle == False: - print "You are not connected to the database, so this function is not available." 
+ + def create_sqlite_file_to_copy_to_cool(self,mck,run,runend="",info="",project="",version=""): + """Create ad sqlite file which can be used to manually add data to COOL""" + # this way https://twiki.cern.ch/twiki/bin/view/AtlasComputing/CoolPublishing#Updating_data_on_the_online_data + + if runend and run >= runend: + print "runend must be greater than run" return - - # get list of all mck_to_smk links - mck_to_smk_links = [] - mck_to_smk_links = self.oi.get_all_mck_to_smk_links() - - # reverse the list in place - mck_to_smk_links.reverse() - - # loop over the list in reverse (lowest smk to largest smk) - for link in mck_to_smk_links: - - # print the results - if link['ACTIVE'] == '1': - print "SMK",link['SMK'],"is linked to MCK",link['MCK'],"(ACTIVE LINK)" - elif print_deactivated_links: - print "SMK",link['SMK'],"was previously linked to MCK",link['MCK'],"(DEACTIVATED LINK)" - def print_all_mck_to_smk_links_old(self): - "Print all MCK to SMK links (unexplicit links version)." - - if self.connected_to_oracle == False: - print "You are not connected to the database, so this function is not available." + # check mck exists + if mck and not self.oi.check_if_mck_id_exists(mck): + print "MCK",mck,"doesn't exist." + return + + if mck == 0 and ( not project or not version ): + print "Please give the AtlasProject and AtlasVersion that you want to write MCK 0 to COOL for." + print "Usage: create_sqlite_file_to_copy_to_cool(mck,run,runend,info,project,version)" + print "project and version are compulsory for MCK 0." 
return - - # get list of all mck_to_smk links - mck_to_smk_links = [] - mck_to_smk_links = self.oi.get_all_mck_to_smk_links() - - # reverse the list in place - mck_to_smk_links.reverse() - - # loop over the list in reverse (lowest smk to largest smk) - for n, link in enumerate(mck_to_smk_links): - - # get some values - start_smk = link['SMK'] - end_smk = -1 - mck_id = link['MCK'] - - # if this in an active link, then we want to print this information - print_active = "" - if link['ACTIVE'] == '1': - print_active = "ACTIVE" - - # if this is not the last element in the list, then get one less than the next smk - if n+1 < len(mck_to_smk_links): - - # variables for looping over SMKs (since the list will include deactivated links - found_next_smk = False - i = 1 - - # loop over links - while not found_next_smk: - - if n+i == len(mck_to_smk_links): - # we're at the end of the list, so end the loop - end_smk = -1 - found_next_smk = True - break - - # next SMK in list - next_smk = mck_to_smk_links[n+i]['SMK'] - - # if it's still the same SMK - if next_smk == start_smk: - # go to the next one - i = i+1 - # else we've actually found the next SMK - else: - end_smk = next_smk - 1 - found_next_smk = True - - # print the results - if end_smk != -1: - print "SMK",start_smk,"to SMK",end_smk,"= MCK",mck_id,print_active - else: - print "SMK",start_smk,"and greater = MCK",mck_id,print_active + if mck and ( project or version ): + print "The AtlasProject and AtlasVersion are taken from the MCK info, and should not be specified manually except for MCK 0." + print "If you want to assign an MCK to a new release consider using the function mam.ms.clone_mck_for_new_release(mck_id,project="",version="")" + print "Continuing but ignoring project and version variables..." 
+ # TO ADD: check that run exists + # something similar to http://acode-browser.usatlas.bnl.gov/lxr/ident?_i=getOnlineRun + + from PyCool import cool + + # Fodler name, description, versioning mode, database spec + folder_name = "/TRIGGER/HLT/MenuAwareMonConfigKey" + desc = '<timeStamp>run-lumi</timeStamp><addrHeader><address_header service_type="71" clid="40774348" /></addrHeader><typeName>AthenaAttributeList</typeName>' + fversion = cool.FolderVersioning.MULTI_VERSION + dbspec = "sqlite://;schema=MCKfolderforCOOL.sqlite;dbname=CONDBR2" + + # column definition + schema = [('MonConfigKey', cool.StorageType.UInt32), # the mck + ('Info', cool.StorageType.String4k)] # optional info (currently unused) + + # create sqlite file + print 'Creating sqlite file' + dbSvc = cool.DatabaseSvcFactory.databaseService() + try: + db = dbSvc.createDatabase(dbspec) + except: + print 'File already exists, please remove/rename' + return + + # setup record and folder specs + rspec = cool.RecordSpecification() + for col, type in schema: + rspec.extend(col, type) + fspec = cool.FolderSpecification(fversion, rspec) + + # make folder + print 'Creating folder', folder_name + folder = db.createFolder(folder_name, fspec, desc, True) + + # make channel 0 + print 'Creating channel 0 for folder', folder_name + chan = 0 + folder.createChannel(chan, 'MonConfigKey', 'MCKs tagged by release') + + # setup the payload and tag + data = cool.Record(rspec) + data['MonConfigKey'] = mck + data['Info'] = info + if mck: + release = self.oi.read_mck_info_from_db(mck)['MCK_ATHENA_VERSION'] + else: + release = project + '-' + version + tag = folder_name.split('/')[-1] + '-' + release + + iov_since = (int(run)<<32) + if runend: + iov_until = (int(runend+1)<<32) + else: + iov_until = (int(run+1)<<32) + print "Storing MCK",mck,"with tag",tag,"for IOV",iov_since,"to",iov_until + folder.storeObject(iov_since,iov_until,data,chan,tag) diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/python/OracleInterface.py 
b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/OracleInterface.py index b0af54a17a67..1a1a0ae00d8b 100644 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/python/OracleInterface.py +++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/OracleInterface.py @@ -14,7 +14,8 @@ import json class OracleInterface: "An interface to the Oracle database for Menu-aware Monitoring." - def connect_to_oracle(self,database_username="",database_password="",database_name="",directory=""): + def connect_to_oracle(self,database_username,database_password,database_name,directory=""): + "Connect to Oracle directly" # connection information USER = database_username @@ -24,27 +25,36 @@ class OracleInterface: # connect to the oracle database self.conn = cx_Oracle.connect(USER, PASSWORD, DATASOURCE) self.cursor = self.conn.cursor() + # optional directory in which to find the MaM schema in a database self.directory = directory if self.directory != "": self.directory = self.directory+"." + def connect_to_oracle_via_frontier(self,frontier_server,schema,directory=""): + "Connect to Oracle via Frontier" + + from TrigConfigSvc.TrigConfFrontier import getFrontierCursor + + self.cursor = getFrontierCursor(frontier_server,schema) + + self.directory = directory + if self.directory != "": + self.directory = self.directory+"." def disconnect_from_oracle(self): - # close oracle connection - #self.conn.close() - try: + try: self.conn.close() return True - except cx_Oracle.InterfaceError: + except cx_Oracle.InterfaceError: return False - else: + else: return False def __unicode_to_str__(self,input1=""): "Input a unicode string, list, or dict, and convert all unicode to str." 
- + # test for the type of input1 # if need be, recursively call this function # where unicode is found, convert it to str @@ -66,48 +76,60 @@ class OracleInterface: def fetch(self, query, parameters_dict = {}): - # fetch results, based on a query, + # fetch results, based on a query, # optionally providing additional parameters as a dictionary, # and return all results result = [] self.cursor.execute(query,parameters_dict) - for value in self.cursor: - if len(value) > 1: - tempvalue = {} - listvalue = list(value) - for x in range(len(value)): - if type(value[x]) is cx_Oracle.LOB: - # convert this into a string now in order to prevent access errors which occur with fetchall() - listvalue[x] = self.__unicode_to_str__(json.loads(value[x].read())) - value = tuple(listvalue) - result.append(value) + try: + for value in self.cursor: + if len(value) > 1: + listvalue = list(value) + for x in range(len(value)): + if type(value[x]) is cx_Oracle.LOB: + # convert this into a string now in order to prevent access errors which occur with fetchall() + listvalue[x] = self.__unicode_to_str__(json.loads(value[x].read())) + value = tuple(listvalue) + result.append(value) + except: + for value in self.cursor.result: + if len(value) > 1: + listvalue = list(value) + for x in range(len(value)): + if type(value[x]) == str and 'MonitCategoryInfo' in value[x]: + # convert this into a string now in order to prevent access errors which occur with fetchall() + listvalue[x] = self.__unicode_to_str__(json.loads(value[x])) + value = tuple(listvalue) + result.append(value) return result - def insert(self, query, parameters_dict = {}): - + # insert a row, # optionally providing additional parameters as a dictionary - self.cursor.execute(query,parameters_dict) - self.conn.commit() + try: + self.cursor.execute(query,parameters_dict) + self.conn.commit() + except: + print "Insert into DB failed. 
Your connection is probably of the wrong type" def read_default_mck_id_from_db(self,athena_version): # return mck_id for default config for Athena version athena_version # if default does not exist, return -1 - # if there are multiple defaults for this Athena version (this should not be the case), return -2 - # EDIT: I haven't got the constraint on this to work (yet), so for now it returns the first mck_id + # if there are multiple defaults for this Athena version (this should not be the case), return -2 + # EDIT: I haven't got the constraint on this to work (yet), so for now it returns the first mck_id # construct query to search database for default config query = """SELECT """+self.directory+"""mck_table.mck_id \ FROM """+self.directory+"""mck_table \ WHERE """+self.directory+"""mck_table.mck_default = 1 \ AND """+self.directory+"""mck_table.mck_athena_version = :ATHENA_VERSION""" - + # create dictionary of input parameter parameters_dict = {} parameters_dict['ATHENA_VERSION'] = athena_version - + # perform search and extract results search_results = self.fetch(query, parameters_dict) @@ -118,7 +140,6 @@ class OracleInterface: # check that there is only one default if len(search_results) > 1: - print "Something has gone horribly wrong with the Oracle database" print "There are",len(search_results),"default mck numbers for Athena ", athena_version for row in search_results: print "default mck_id =",row[0] @@ -132,6 +153,52 @@ class OracleInterface: return default_mck + def delete_default_mck_id_from_db(self,athena_version): + # delete the default from the DB for the given athena version, including all associated SMCK + + # check for the default + default_mck = self.read_default_mck_id_from_db(athena_version) + if default_mck: + print default_mck + else: + return + + query = """DELETE FROM """+self.directory+"""mck_to_smck_link \ + WHERE """+self.directory+"""mck_to_smck_link.link_mck = :DEFAULT_MCK""" + + # create dictionary of input parameter + parameters_dict = 
{} + parameters_dict['DEFAULT_MCK'] = default_mck + + # perform deletion + self.cursor.execute(query, parameters_dict) + + query = """DELETE FROM """+self.directory+"""smck_table \ + WHERE """+self.directory+"""smck_table.smck_default = 1 \ + AND """+self.directory+"""smck_table.smck_athena_version = :ATHENA_VERSION""" + + # create dictionary of input parameter + parameters_dict = {} + parameters_dict['ATHENA_VERSION'] = athena_version + + # perform deletion + self.cursor.execute(query, parameters_dict) + + # construct query to search database for default config + query = """DELETE FROM """+self.directory+"""mck_table \ + WHERE """+self.directory+"""mck_table.mck_default = 1 \ + AND """+self.directory+"""mck_table.mck_athena_version = :ATHENA_VERSION""" + + # perform deletion + self.cursor.execute(query, parameters_dict) + + default_mck = self.read_default_mck_id_from_db(athena_version) + if default_mck == -1: + print 'Operation successful.' + else: + print 'Operation failed.' + + def upload_mck_and_smck(self, global_info): # upload smck_info if they are not already present in the database # upload and link mck if it is not already present in the database @@ -205,7 +272,7 @@ class OracleInterface: mck_info['MCK_CREATION_DATE'] = mck_info['MCK_CREATION_DATE'].getvalue() # return the new mck_id - return mck_info['MCK_ID'] + return mck_info['MCK_ID'] def upload_smck(self,smck_info): # check if smck_info is already in database @@ -244,8 +311,8 @@ class OracleInterface: smck_info['SMCK_ID'] = self.cursor.var(cx_Oracle.NUMBER) smck_info['SMCK_CREATION_DATE'] = self.cursor.var(cx_Oracle.TIMESTAMP) - # we must build an smck_info_to_submit, - # as smck_info contains a dict for smck_config, + # we must build an smck_info_to_submit, + # as smck_info contains a dict for smck_config, # but we need to insert a json string smck_info_to_submit = smck_info.copy() smck_info_to_submit['SMCK_CONFIG'] = json.dumps(smck_info['SMCK_CONFIG'], ensure_ascii=True, sort_keys=True) @@ -259,7 
+326,7 @@ class OracleInterface: # return the new smck_id return smck_info['SMCK_ID'] - + # if the smck_info is already in database else: @@ -326,13 +393,13 @@ class OracleInterface: # construct a query with a dummy 1=1 so that all subsequent lines start with AND query = "SELECT "+self.directory+"mck_table.mck_id FROM "+self.directory+"mck_table WHERE 1=1 " - + # for each smck_id, find mck that link to that smck # returned mck_id must link to all smck for smck_id in smck_ids: - + # add a sub-query for each smck - query += "AND "+self.directory+"mck_table.mck_id IN (SELECT "+self.directory+"mck_to_smck_link.link_mck FROM "+self.directory+"mck_to_smck_link WHERE "+self.directory+"mck_to_smck_link.link_smck = "+str(smck_id)+" ) " + query += "AND "+self.directory+"mck_table.mck_id IN (SELECT "+self.directory+"mck_to_smck_link.link_mck FROM "+self.directory+"mck_to_smck_link WHERE "+self.directory+"mck_to_smck_link.link_smck = "+str(smck_id)+" ) " # search with this query search_results = self.fetch(query) @@ -348,10 +415,16 @@ class OracleInterface: return return_list else: return -1 - + def check_if_mck_id_exists(self,mck_id): # check if an mck exists with the requested mck_id + if not isinstance(mck_id, int): + return False + + if mck_id == 0: + return False + # construct a query query = "SELECT "+self.directory+"mck_table.mck_id FROM "+self.directory+"mck_table WHERE mck_id = "+str(mck_id) @@ -387,7 +460,7 @@ class OracleInterface: # perform the search search_results = self.fetch(query,parameters_dict) - + # if there are results, return them (smck_id) # otherwise return -1 if len(search_results) > 0: @@ -446,14 +519,14 @@ class OracleInterface: # perform the search search_results = self.fetch(query,parameters_dict) - # if there are results, return them + # if there are results, return them # otherwise return -1 if len(search_results) > 0: # create mck_info with returned data mck_info = {} mck_info['MCK_ID'] = search_results[0][0] - mck_info['MCK_DEFAULT'] = 
search_results[0][1] + mck_info['MCK_DEFAULT'] = search_results[0][1] mck_info['MCK_ATHENA_VERSION'] = search_results[0][2] mck_info['MCK_CREATOR'] = search_results[0][3] mck_info['MCK_CREATION_DATE'] = search_results[0][4] @@ -461,7 +534,7 @@ class OracleInterface: # return filled mck_info return mck_info - + else: return -1 @@ -480,7 +553,7 @@ class OracleInterface: # perform the search search_results = self.fetch(query,parameters_dict) - # if there are results, return them + # if there are results, return them # otherwise return -1 if len(search_results) > 0: @@ -493,7 +566,7 @@ class OracleInterface: smck_info['SMCK_PROCESSING_STEP'] = search_results[0][4] smck_info['SMCK_PROCESSING_STREAM'] = search_results[0][5] #smck_info['SMCK_CONFIG'] = json.loads(search_results[0][6].read()) # read() required to extract CLOB data - smck_info['SMCK_CONFIG'] = search_results[0][6] + smck_info['SMCK_CONFIG'] = search_results[0][6] smck_info['SMCK_CONFIG_HASH'] = search_results[0][7] smck_info['SMCK_DEFAULT'] = search_results[0][8] smck_info['SMCK_ATHENA_VERSION'] = search_results[0][9] @@ -504,7 +577,7 @@ class OracleInterface: # return filled smck_info return smck_info - + else: return -1 @@ -562,7 +635,7 @@ class OracleInterface: FROM """+self.directory+"""smck_table \ WHERE """+self.directory+"""smck_table.smck_tool_type=:INPUT_TOOL_TYPE \ ORDER BY """+self.directory+"""smck_table.smck_id DESC """ - + # construct the dict of the input smck_id parameters_dict = {} parameters_dict['INPUT_TOOL_TYPE'] = input_tool_type @@ -587,36 +660,33 @@ class OracleInterface: # get data on all columns, and associated table names and column data # hard-code table names for now, until a better solution is found - table_names = ['MCK_TABLE','SMCK_TABLE','MCK_TO_SMCK_LINK','MCK_TO_SMK_LINK'] + table_names = ['MCK_TABLE','SMCK_TABLE','MCK_TO_SMCK_LINK','MCK_TO_SMK_LINK','SUPER_MASTER_TABLE'] - # return a list of dictionaries return_list = [] - # loop over tables - for table in table_names: - - 
# construct a query - query = "SELECT * FROM "+self.directory+table + try: + for table in table_names: - # perform the search - # note that this is a custom search command / magic - search_results = self.cursor.execute(query).description + query = "SELECT * FROM "+self.directory+table + search_results = self.cursor.execute(query).description - # loop over rows in search results - for row in search_results: + # loop over rows in search results + for row in search_results: - # construct a dictionary of the results - row_dict = {} - row_dict['TABLE_NAME'] = table.upper() - row_dict['COLUMN_NAME'] = row[0].upper() - row_dict['DATA_TYPE'] = row[1] - row_dict['DATA_LENGTH'] = row[2] + # construct a dictionary of the results + row_dict = {} + row_dict['TABLE_NAME'] = table.upper() + row_dict['COLUMN_NAME'] = row[0].upper() + row_dict['DATA_TYPE'] = row[1] + row_dict['DATA_LENGTH'] = row[2] - # add this dictionary to the return list - return_list.append(row_dict) + # add this dictionary to the return list + return_list.append(row_dict) - # return the return list - return return_list + # return the return list + return return_list + except: + print "Cannot get database table and column names. Your database connection is probably of the wrong type." 
def get_db_tables_and_columns_old(self): @@ -678,7 +748,7 @@ class OracleInterface: # no match, so return the empty list return return_list - + # construct the dict of the various input parameters_dict = {} @@ -687,7 +757,7 @@ class OracleInterface: if input1: query = query+" WHERE "+column_name+" = :INPUT1" parameters_dict['INPUT1'] = input1 - + #query = "SELECT * FROM "+self.directory+table_name+" WHERE "+column_name+" = :INPUT1" # perform the search @@ -701,7 +771,7 @@ class OracleInterface: if schema_row['TABLE_NAME'] == table_name: # then we add this column name to our list - # we need to add it to the start of the list (insert before element 0), + # we need to add it to the start of the list (insert before element 0), # as the database table/column search function returns the results in reversed order(?) # column_list.insert(0,schema_row['COLUMN_NAME']) # it looks to me like the database table/column search function returns the columns in the right order, so do that @@ -725,7 +795,7 @@ class OracleInterface: # need to check if this is an smck, # in which case we need to turn the json CLOB into a dict if table_name == 'SMCK_TABLE': - + # first we read out the CLOB # then we turn the json string into a dict #row_dict['SMCK_CONFIG'] = json.loads(row_dict['SMCK_CONFIG'].read()) @@ -734,10 +804,52 @@ class OracleInterface: # add this dict to the return list return_list.append(row_dict) - + # now our list of dictionaries should be complete, so we return it return return_list + def get_smk_list(self,print_list=True): + # get all SMK IDs in a format that the GUI can handle + + query = """SELECT SMT_ID FROM """+self.directory+"""super_master_table """ + results = self.fetch(query) + smk_ids = [] + for smk in results: + smk_ids.append(smk[0]) + smk_ids.sort() + if print_list==True: + for smk in smk_ids: + print smk + return smk_ids + + def get_mck_list(self,print_list=True): + # get all MCK IDs in a format that the GUI can handle + + query = """SELECT MCK_ID FROM 
"""+self.directory+"""mck_table """ + results = self.fetch(query) + mck_ids = [] + for mck in results: + mck_ids.append(mck[0]) + mck_ids.sort() + if print_list==True: + for mck in mck_ids: + print mck + return mck_ids + + def get_smck_list(self,print_list=True): + # get all SMCK IDs in a format that the GUI can handle + + query = """SELECT SMCK_ID FROM """+self.directory+"""smck_table """ + results = self.fetch(query) + smck_ids = [] + for smck in results: + smck_ids.append(smck[0]) + smck_ids.sort() + if print_list==True: + for smck in smck_ids: + print smck + return smck_ids + def check_if_smk_to_mck_link_exists_and_is_active(self,smk,mck): # return True of False, depending on whether this exact mck-smk link exists and is active @@ -767,20 +879,20 @@ class OracleInterface: def check_if_smk_to_mck_link_exists(self,smk,mck): # return True of False, depending on whether this exact mck-smk link exists - + # construct the query query = """SELECT * FROM """+self.directory+"""mck_to_smk_link \ WHERE """+self.directory+"""mck_to_smk_link.smk_link_mck = :MCK \ AND """+self.directory+"""mck_to_smk_link.smk_link_smk = :SMK """ - + # construct the dict of the input mck_id and smk parameters_dict = {} parameters_dict['MCK'] = mck parameters_dict['SMK'] = smk - + # perform the search search_results = self.fetch(query,parameters_dict) - + # if there are results, return True # otherwise return False if len(search_results) > 0: @@ -831,38 +943,38 @@ class OracleInterface: def deactivate_smk_mck_link(self,smk,mck): # for a given smk, deactivate all existing smk-mck links # by setting smk_link_active_mck='0' - + # make query to update links query = """ UPDATE """+self.directory+"""mck_to_smk_link \ SET """+self.directory+"""mck_to_smk_link.smk_link_active_mck = :ACTIVE_LINK \ WHERE """+self.directory+"""mck_to_smk_link.smk_link_smk = :SMK \ AND """+self.directory+"""mck_to_smk_link.smk_link_mck = :MCK """ - + # create dictionary of input parameter parameters_dict = {} 
parameters_dict['SMK'] = smk parameters_dict['MCK'] = mck parameters_dict['ACTIVE_LINK'] = '0' - + # insert this into the database self.insert(query,parameters_dict) def activate_smk_mck_link(self,smk,mck): # he given smk-mck link # by setting smk_link_active_mck='1' - + # make query to update links query = """ UPDATE """+self.directory+"""mck_to_smk_link \ SET """+self.directory+"""mck_to_smk_link.smk_link_active_mck = :ACTIVE_LINK \ WHERE """+self.directory+"""mck_to_smk_link.smk_link_smk = :SMK \ AND """+self.directory+"""mck_to_smk_link.smk_link_mck = :MCK """ - + # create dictionary of input parameter parameters_dict = {} parameters_dict['SMK'] = smk parameters_dict['MCK'] = mck parameters_dict['ACTIVE_LINK'] = '1' - + # insert this into the database self.insert(query,parameters_dict) @@ -885,20 +997,20 @@ class OracleInterface: def check_if_smk_exists(self,smk_id): - + # construct the query query = """SELECT * FROM """+self.directory+"""super_master_table \ WHERE """+self.directory+"""super_master_table.smt_id = :SMK """ #smt_name - + # construct the dict of the input smk parameters_dict = {} parameters_dict['SMK'] = smk_id - + # perform the search search_results = self.fetch(query,parameters_dict) - return search_results + return search_results def upload_mck_to_smk_link(self,mck_id,smk,creator,comment=""): diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/python/PackagesToInterrogate.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/PackagesToInterrogate.py index 8fe05b37743b..45253c91caaf 100644 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/python/PackagesToInterrogate.py +++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/PackagesToInterrogate.py @@ -14,17 +14,17 @@ class PackagesToInterrogate: PackagesToInterrogate = { "General" : { "PackageName" : "TrigHLTMonitoring.TrigHLTMonitoringConfig", "ToolName" : "HLTGeneralTool", "MonitCategoryName" : "", "Dll" : "TrigHLTMonitoring"}, "Bjet" : { "PackageName" : 
"TrigBjetMonitoring.TrigBjetMonitoringConfig", "ToolName" : "TrigBjetMonitoringConfig", "MonitCategoryName" : "TrigBjetMonitoring.TrigBjetMonitCategory", "Dll" : "TrigBjetMonitoring"}, - "Bphys" : { "PackageName" : "TrigBphysMonitoring.TrigBphysMonitoringConfig", "ToolName" : "TrigBphysMonitoringTool", "MonitCategoryName" : "TrigBphysMonitoring.TrigBphysMonitCategory", "Dll" : "TrigBphysMonitoring"}, - "Calo" : { "PackageName" : "TrigCaloMonitoring.TrigCaloMonitoringConfig", "ToolName" : "HLTCaloMonitoringTool", "MonitCategoryName" : "", "Dll" : "TrigCaloMonitoring"}, - "Egamma" : { "PackageName" : "TrigEgammaMonitoring.TrigEgammaMonitoringConfig", "ToolName" : "TrigEgammaMonitoringTool", "MonitCategoryName" : "TrigEgammaMonitoring.TrigEgammaMonitCategory", "Dll" : "TrigEgammaAnalysisTools"}, - "IDJpsi" : { "PackageName" : "TrigIDJpsiMonitoring.TrigIDJpsiMonitoringConfig", "ToolName" : "TrigIDJpsiMonitoringTool", "MonitCategoryName" : "", "Dll" : "TrigIDJpsiMonitoring"}, - "IDtrk" : { "PackageName" : "TrigIDtrkMonitoring.TrigIDtrkMonitoringConfig", "ToolName" : "TrigIDtrkMonitoringTool", "MonitCategoryName" : "", "Dll" : "TrigInDetAnalysisExample"}, - "Jet" : { "PackageName" : "TrigJetMonitoring.TrigJetMonitoringConfig", "ToolName" : "TrigJetMonitoringTool", "MonitCategoryName" : "TrigJetMonitoring.TrigJetMonitCategory", "Dll" : "TrigJetMonitoring"}, - "MET" : { "PackageName" : "TrigMETMonitoring.TrigMETMonitoringConfig", "ToolName" : "HLTMETMonitoringTool", "MonitCategoryName" : "TrigMETMonitoring.TrigMETMonitCategory", "Dll" : "TrigMETMonitoring"}, - "MinBias" : { "PackageName" : "TrigMinBiasMonitoring.TrigMinBiasMonitoringConfig", "ToolName" : "TrigMinBiasMonitoringTool", "MonitCategoryName" : "TrigMinBiasMonitoring.TrigMinBiasMonitCategory", "Dll" : "TrigMinBiasMonitoring"}, - "Muon" : { "PackageName" : "TrigMuonMonitoring.TrigMuonMonitoringConfig", "ToolName" : "TrigMuonMonitoringTool", "MonitCategoryName" : "TrigMuonMonitoring.TrigMuonMonitCategory", "Dll" : 
"TrigMuonMonitoring"}, + "Bphys" : { "PackageName" : "TrigBphysMonitoring.TrigBphysMonitoringConfig", "ToolName" : "TrigBphysMonitoringTool", "MonitCategoryName" : "TrigBphysMonitoring.TrigBphysMonitCategory", "Dll" : "TrigBphysMonitoring"}, + "Calo" : { "PackageName" : "TrigCaloMonitoring.TrigCaloMonitoringConfig", "ToolName" : "HLTCaloMonitoringTool", "MonitCategoryName" : "", "Dll" : "TrigCaloMonitoring"}, + "Egamma" : { "PackageName" : "TrigEgammaMonitoring.TrigEgammaMonitoringConfig", "ToolName" : "TrigEgammaMonitoringTool", "MonitCategoryName" : "TrigEgammaMonitoring.TrigEgammaMonitCategory", "Dll" : "TrigEgammaAnalysisTools"}, + "IDJpsi" : { "PackageName" : "TrigIDJpsiMonitoring.TrigIDJpsiMonitoringConfig", "ToolName" : "TrigIDJpsiMonitoringTool", "MonitCategoryName" : "", "Dll" : "TrigIDJpsiMonitoring"}, + "IDtrk" : { "PackageName" : "TrigIDtrkMonitoring.TrigIDtrkMonitoringConfig", "ToolName" : "TrigIDtrkMonitoringTool", "MonitCategoryName" : "", "Dll" : "TrigInDetAnalysisExample"}, + "Jet" : { "PackageName" : "TrigJetMonitoring.TrigJetMonitoringConfig", "ToolName" : "TrigJetMonitoringTool", "MonitCategoryName" : "TrigJetMonitoring.TrigJetMonitCategory", "Dll" : "TrigJetMonitoring"}, + "MET" : { "PackageName" : "TrigMETMonitoring.TrigMETMonitoringConfig", "ToolName" : "HLTMETMonitoringTool", "MonitCategoryName" : "TrigMETMonitoring.TrigMETMonitCategory", "Dll" : "TrigMETMonitoring"}, + "MinBias" : { "PackageName" : "TrigMinBiasMonitoring.TrigMinBiasMonitoringConfig", "ToolName" : "TrigMinBiasMonitoringTool", "MonitCategoryName" : "TrigMinBiasMonitoring.TrigMinBiasMonitCategory", "Dll" : "TrigMinBiasMonitoring"}, + "Muon" : { "PackageName" : "TrigMuonMonitoring.TrigMuonMonitoringConfig", "ToolName" : "TrigMuonMonitoringTool", "MonitCategoryName" : "TrigMuonMonitoring.TrigMuonMonitCategory", "Dll" : "TrigMuonMonitoring"}, "Tau" : { "PackageName" : "TrigTauMonitoring.TrigTauMonitoringConfig", "ToolName" : "TrigTauMonitoringTool", "MonitCategoryName" : 
"TrigTauMonitoring.TrigTauMonitCategory", "Dll" : "TrigTauMonitoring"} } - # "Egamma" : { "PackageName" : "TrigEgammaMonitoring.TrigEgammaMonitoringConfig", "ToolName" : "HLTEgammaMonitoringTool", "MonitCategoryName" : "TrigEgammaMonitoring.TrigEgammaMonitCategory", "Dll" : "TrigEgammaMonitoring"}, + # "Egamma" : { "PackageName" : "TrigEgammaMonitoring.TrigEgammaMonitoringConfig", "ToolName" : "HLTEgammaMonitoringTool", "MonitCategoryName" : "TrigEgammaMonitoring.TrigEgammaMonitCategory", "Dll" : "TrigEgammaMonitoring"}, # "Egamma_Dump" : { "PackageName" : "TrigEgammaMonitoring.TrigEgammaMonitoringConfig", "ToolName" : "HLTEgammaMonitoringDumpTool", "MonitCategoryName" : "TrigEgammaMonitoring.TrigEgammaMonitCategory", "Dll" : "TrigEgammaMonitoring"} #} diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/python/ToolInterrogator.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/ToolInterrogator.py index 9c59afcb163d..09a34eb68c52 100644 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/python/ToolInterrogator.py +++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/ToolInterrogator.py @@ -35,23 +35,6 @@ class ToolInterrogator: print "#"*10 print "" - # nasty hacks to fix problems in HLTMonTriggerList.py in Athena 20.1.5.8 - # - # line 60, in config - # self.primary_single_ele_iso = egamma.primary_single_ele_iso - # AttributeError: 'module' object has no attribute 'primary_single_ele_iso' - # - # line 84, in config - # self.monitoring_muonEFFS = muon.monitoring_muonEFFS - # AttributeError: 'module' object has no attribute 'monitoring_muonEFFS' - # - # so we give these objects the offending attributes... 
- # import TrigEgammaMonitoring.TrigEgammaMonitCategory as egamma - # egamma.primary_single_ele_iso = ['e24_lhmedium_iloose_L1EM18VH','e24_lhmedium_iloose_L1EM20VH','e24_lhtight_iloose_L1EM20VH','e24_lhtight_iloose','e26_lhtight_iloose'] - # egamma.primary_single_ele_cutbased_iso = ['e24_medium_iloose_L1EM18VH','e24_medium_iloose_L1EM20VH','e24_tight_iloose_L1EM20VH','e24_tight_iloose','e26_tight_iloose'] - # import TrigMuonMonitoring.TrigMuonMonitCategory as muon - # muon.monitoring_muonEFFS = ['HLT_mu18_mu8noL1'] - # load all tools in PackagesToInterrogate for key, value in self.packages_to_interrogate.PackagesToInterrogate.iteritems(): @@ -92,7 +75,7 @@ class ToolInterrogator: if "Trig" in tool.getDlls() and "Monitoring" in tool.getDlls(): mon_tools.append(tool.getName()) # - # NEW: + # NEW: # find if Trig and Monitoring are in the tool Dll name, or if the tool name contains both 'Mon' and either 'HLT' or 'Trig' # (This change was made to catch TrigEgammaMonTool, which has 'TrigEgammaAnalysisTools' as its Dll name) # FURTHER EDIT: extra if statemend added below (7 lines below here) to catch all TrigEgammaAnalysisTools, so we can just revert back to the previous 'OLD' if statement above @@ -105,7 +88,7 @@ class ToolInterrogator: # of if the tool Dll name is TrigEgammaAnalysisTools, as is the case for the Egamma tools if 'TrigEgammaAnalysisTools' == tool.getDlls(): mon_tools.append(tool.getName()) - + # return the list of monitoring tools print "mon_tools =",mon_tools @@ -169,7 +152,7 @@ class ToolInterrogator: smck_config['MonitCategoryName'] = self.packages_to_interrogate.PackagesToInterrogate[slice_name]['MonitCategoryName'] smck_config['MonitCategoryInfo'] = self.interrogate_MonitCategory(self.packages_to_interrogate.PackagesToInterrogate[slice_name]['MonitCategoryName']) - # json dump and load, + # json dump and load, # so that any odd characters are consistently encoded in the output smck_config dictionary smck_config = json.loads( json.dumps(smck_config, 
ensure_ascii=True, sort_keys=True) ) @@ -185,14 +168,14 @@ class ToolInterrogator: return -1 # now we begin interrogating the tool... - + # This is the dictionary where we will store all our good gathered information tool_info = {} - + # first we get its properties tool_properties = "" exec "tool_properties = ToolSvc.%s.properties()" % (tool_ToolSvc_name) - + # we also get the property 'no value' string for this tool (to use later) tool_novalue = "" exec "tool_novalue = ToolSvc.%s.propertyNoValue" % (tool_ToolSvc_name) @@ -200,22 +183,22 @@ class ToolInterrogator: # and we get the default property values tool_default_properties = "" exec "tool_default_properties = ToolSvc.%s.getDefaultProperties()" % (tool_ToolSvc_name) - + # then we check these properties for prop,value in tool_properties.iteritems(): - + # does the tool really have this property? tool_property_truth = False exec "tool_property_truth = hasattr(ToolSvc.%s,prop)" % (tool_ToolSvc_name) - + # if it does not... # then the value has likely not been set # ie. the default value is to be used if tool_property_truth == False: - + # does this property hold a 'no value' value? if value == tool_novalue: - + # if so, use the default value if tool_default_properties.__contains__(prop): value = tool_default_properties[prop] @@ -232,7 +215,7 @@ class ToolInterrogator: # Little hack because MaM doesn't know what GaudiKernel is if 'GaudiKernel.GaudiHandles.PublicToolHandleArray' in str(type(value)): continue - + # test if this value is JSON serializable try: @@ -330,7 +313,7 @@ class ToolInterrogator: # now we can return the results return monitCategoryObject_interesting_properties - + def __unicode_to_str__(self,input1=""): "Input a unicode string, list, or dict, and convert all unicode to str." 
diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/python/scripts/DumpDefaultMonConfig.sh b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/scripts/DumpDefaultMonConfig.sh new file mode 100755 index 000000000000..272067540097 --- /dev/null +++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/scripts/DumpDefaultMonConfig.sh @@ -0,0 +1,23 @@ +#!/bin/bash +if [ "$#" -ne 5 ]; then + echo "Illegal number of parameters" + exit 1 +fi + +cleanheaddir="$1" +cleandir="$2" +release="$3" +filename="$4" +monmode="$5" + +echo $cleanheaddir $cleandir $release $filename $monmode +mkdir -p $cleanheaddir +cd $cleanheaddir +mkdir -p $cleandir +cd $cleandir +source $AtlasSetup/scripts/asetup.sh $release +if [[ ! -z $monmode ]]; then + athena.py -i -c "from TrigHLTMonitoring.MenuAwareMonitoring import MenuAwareMonitoring;mam = MenuAwareMonitoring();mam.setup_all_local_tools('"$monmode"');mam.dump_local_config_to_json(output_json_filename='"$filename"',comment='"$release" default',default=1);print 'Made default';quit()" +else + athena.py -i -c "from TrigHLTMonitoring.MenuAwareMonitoring import MenuAwareMonitoring;mam = MenuAwareMonitoring();mam.setup_all_local_tools();mam.dump_local_config_to_json(output_json_filename='"$filename"',comment='"$release" default',default=1);print 'Made default';quit()" +fi \ No newline at end of file diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/python/scripts/MCKtoCOOLmanual.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/scripts/MCKtoCOOLmanual.py new file mode 100644 index 000000000000..5e373e753b25 --- /dev/null +++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/scripts/MCKtoCOOLmanual.py @@ -0,0 +1,85 @@ +# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration + +# Author: X. 
Hoad <xhoad@cern.ch> +# for use at P1 from /det/tdaq/hlt/mam/python + +import argparse +import sys +from PyCool import cool +import os +os.environ['MAM_CORAL_DBLOOKUP_PATH']='/det/tdaq/hlt/mam/authentication/' +os.environ['MAM_CORAL_AUTH_PATH']=os.environ['MAM_CORAL_DBLOOKUP_PATH'] +from TrigHLTMonitoring.MenuAwareMonitoringStandalone import MenuAwareMonitoringStandalone +from CoolConvUtilities.AtlCoolLib import indirectOpen + +parser = argparse.ArgumentParser(description="Store an MCK in COOL for a run or run range. If MCK = 0, the offline project and version to store as the tag can be specifed, otherwise, the tag is taken from the MCK info stored in the TriggerDB") +parser.add_argument('account', action='store', help='account with write privileges for oracle://ATONR_COOL;schema=ATLAS_COOLONL_TRIGGER;dbname=CONDBR2', type=str) +parser.add_argument('password', action='store', help='password for account', type=str) +parser.add_argument('--run', action='store', help='run to store MCK for, or first run of range if --runend is used (required)', type=int, required=True) +parser.add_argument('--mck', action='store', help='MCK to store (required)', type=int, required=True) +parser.add_argument('--info',action='store', default="", help='info to store (optional)', type=str) +parser.add_argument('--runend', action='store', help='final run of range (inclusive) (optional)', type=int) +parser.add_argument('--project', action='store', default="", help='offline AtlasProject to use for tag if MCK = 0 (ignored if MCK != 0)', type=str) +parser.add_argument('--version', action='store', default="", help='offline AtlasVersion to use for tag if MCK = 0 (ignored if MCK != 0)', type=str) +args=parser.parse_args() + +if args.runend and args.run >= args.runend: + print "runend must be greater than run" + sys.exit(2) + +if args.mck == 0 and ( not args.project or not args.version ): + print "Please give the AtlasProject and AtlasVersion that you want to write MCK 0 to COOL for." 
+ print "project and version arguments are compulsory for MCK 0." + sys.exit(2) + +if args.mck and ( args.project or args.version ): + print "The AtlasProject and AtlasVersion are taken from the MCK info, and should not be specified manually except for MCK 0." + sys.exit(2) + +print "Checking MCK is valid..." + +mam = MenuAwareMonitoringStandalone() + +if args.mck and not mam.oi.check_if_mck_id_exists(args.mck): + # check if a non-zero MCK is a real MCK + print "MCK does not exist" + sys.exit(1) +else: + print "MCK is valid" + +connstring = "oracle://ATONR_COOL;schema=ATLAS_COOLONL_TRIGGER;dbname=CONDBR2;user="+args.account+";password="+args.password +print "Using",connstring +coolDB=indirectOpen(connstring,readOnly=False,oracle=True,debug=True) +if coolDB is None: + print "Unable to connect to",connstring,"to write MCK to COOL." + sys.exit(1) + +# open the folder for writing +folder_name = 'MenuAwareMonConfigKey' +folder = coolDB.getFolder('/TRIGGER/HLT/' + folder_name ) + +# set up payload format +schema = [('MonConfigKey', cool.StorageType.UInt32), # the mck + ('Info', cool.StorageType.String4k)] # optional info (currently unused) +rspec = cool.RecordSpecification() +for col, type in schema: + rspec.extend(col, type) + +# set up the payload and tag +iov_since = (int(args.run)<<32) +if args.runend: + iov_until = (int(args.runend+1)<<32) +else: + iov_until = (int(args.run+1)<<32) +data = cool.Record(rspec) +data['MonConfigKey'] = args.mck +data['Info'] = args.info +chan = 0 +if args.mck: + release = mam.oi.read_mck_info_from_db(args.mck)['MCK_ATHENA_VERSION'] +else: + release = args.project + '-' + args.version +tag = folder_name.split('/')[-1] + '-' + release + +print "Storing MCK",args.mck,"with tag",tag,"for IOV",iov_since,"to",iov_until +folder.storeObject(iov_since,iov_until,data,chan,tag) diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/python/scripts/RunProcessWithMonitor.py 
b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/scripts/RunProcessWithMonitor.py new file mode 100755 index 000000000000..ab2af80bfd87 --- /dev/null +++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/scripts/RunProcessWithMonitor.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python + +# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Usage: python RunProcessWithMonitor.py [args] +# Purpose: workaround to calling asetup via subprocess in athena + +import sys +import subprocess +from select import select + +TIMEOUT = 10 +print sys.argv[1:] + +while True: + # start a new process to monitor + child = subprocess.Popen(sys.argv[1:], stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) + while True: + try: + rlist,_,_ = select([child.stdout], [], [], TIMEOUT) + if rlist: + print child.communicate()[0] + else: + # timeout occurred, did the process finish? + if child.poll() is not None: + # child process completed (or was killed, but didn't hang), we are done + print 'Process complete' + else: + # otherwise, kill the child + child.communicate(input='quit()')[0] + break + except Exception as e: + if str(e) != 'I/O operation on closed file': + print type(e) + print e + # now we need to exit via os._exit rather than sys.exit, as sys.exit hangs if we are running in athena + import os + os._exit(0) diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/share/HLTMonitoring_topOptions.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/share/HLTMonitoring_topOptions.py index fca2865725f9..c9959bb305d3 100755 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/share/HLTMonitoring_topOptions.py +++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/share/HLTMonitoring_topOptions.py @@ -1,5 +1,5 @@ -print "hello from TrigHLTMonitoringJobOptions_forRecExCommission.py" - +from AthenaCommon.Logging import logging +log = logging.getLogger( 'TrigHLTMonitoring/HLTMonitoring_topOptions' ) ######## flags ########### if not 'HLTMonFlags' in dir(): @@ -8,12 +8,12 @@ if 
not 'HLTMonFlags' in dir(): if not 'DQMonFlags' in dir(): from AthenaMonitoring.DQMonFlags import DQMonFlags - ########## control step assignment ######### +########## control step assignment ######### if DQMonFlags.monManEnvironment == 'tier0Raw': # we are in RAW->ESD step # run all tools *except* the following (these are run in ESD->AOD) - print 'HLTMonitoring_topOptions.py: environment is tier0Raw' + log.info('Environment is tier0Raw') HLTMonFlags.doGeneral = False HLTMonFlags.doBjet = False HLTMonFlags.doBphys = False @@ -28,26 +28,25 @@ if DQMonFlags.monManEnvironment == 'tier0Raw': HLTMonFlags.doOfflineTauTTP = False HLTMonFlags.doIDJpsiMon = False elif DQMonFlags.monManEnvironment == 'tier0ESD': + log.info('Environment is tier0ESD') # we are in ESD->AOD step # run all tools *except* the following (these are run in RAW->ESD) - print 'HLTMonitoring_topOptions.py: environment is tier0ESD' - # HLTMonFlags.doCalo = False + # HLTMonFlags.doCalo = False + # HLTMonFlags.doMaM = True + # HLTMonFlags.doMaM_ApplyMCK = True elif DQMonFlags.monManEnvironment == 'tier0': # we are in RAW -> ALL, run everything - print 'HLTMonitoring_topOptions.py: environment is tier0' + log.info('Environment is tier0') else : - print 'HLTMonitoring_topOptions.py: environment is neither tier0Raw nor tier0ESD' - print 'HLTMonitoring_topOptions.py: switching all tools off...' 
+ log.info('Environment is neither tier0Raw nor tier0ESD') + log.info('Switching all tools off...') HLTMonFlags.doGeneral = False HLTMonFlags.doMonTier0 = False -### Menu-aware monitoring flags ### -#HLTMonFlags.doMaM = True # default is False -#HLTMonFlags.doMaM_ExtractAndDumpConfigs = True # default is False -#HLTMonFlags.MaM_OutputJSON = "mam_configs.json" # default is "mam_configs.json" -#HLTMonFlags.doMaM_ApplyMCK = True # default is False -#HLTMonFlags.MCK = -1 # default is -1 - # temporarily disabling IDJpsiMon to deal with ATR-12037 HLTMonFlags.doIDJpsiMon = False + +log.info("HLTMonFlags are:") +print HLTMonFlags + include( "TrigHLTMonitoring/addMonTools.py" ) diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/share/addMonTools.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/share/addMonTools.py index ab116f8b2042..3826066fdd1f 100644 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/share/addMonTools.py +++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/share/addMonTools.py @@ -6,6 +6,8 @@ from AthenaCommon.AlgSequence import AlgSequence topSequence = AlgSequence() +from AthenaCommon.Logging import logging +log = logging.getLogger( 'TrigHLTMonitoring/addMonTools' ) from AthenaMonitoring.AthenaMonitoringConf import AthenaMonManager @@ -16,7 +18,7 @@ from RecExConfig.RecFlags import rec # if not 'HLTMonFlags' in dir(): # from TrigHLTMonitoring.HLTMonFlags import HLTMonFlags -# +# # if not 'DQMonFlags' in dir(): # from AthenaMonitoring.DQMonFlags import DQMonFlags @@ -36,7 +38,7 @@ if HLTMonFlags.doGeneral: from TrigHLTMonitoring.TrigHLTMonitoringConfig import HLTGeneralTool HLTMonManager.AthenaMonTools += HLTGeneralTool() except: - print "Problems with the general HLTMonTool" + log.info("Problems with the general HLTMonTool, tool not enabled") if HLTMonFlags.doMonTier0: @@ -49,7 +51,7 @@ if HLTMonFlags.doMonTier0: from TrigEgammaMonitoring.TrigEgammaMonitoringConfig import TrigEgammaMonitoringTool HLTMonManager.AthenaMonTools += TrigEgammaMonitoringTool() except: 
- print "Problems with HLTEgammaTool, tool not enabled" + log.info("Problems with HLTEgammaTool, tool not enabled") # HLTCaloTool - Makes basic comparisons between online/offline if HLTMonFlags.doCalo: @@ -58,15 +60,15 @@ if HLTMonFlags.doMonTier0: from TrigCaloMonitoring.TrigCaloMonitoringConfig import HLTCaloMonitoringTool HLTMonManager.AthenaMonTools+=HLTCaloMonitoringTool() except: - print "Problems with HLTCaloTool, tool not enabled" - + log.info("Problems with HLTCaloTool, tool not enabled") + # HLTMuonMonTool - Multiple Muon algorithms EDM Dumper if HLTMonFlags.doMuon: try: from TrigMuonMonitoring.TrigMuonMonitoringConfig import TrigMuonMonitoringTool HLTMonManager.AthenaMonTools += TrigMuonMonitoringTool() except: - print "Problems with HLTMuonTool, tool not enabled" + log.info("Problems with HLTMuonTool, tool not enabled") # IDtrk HLTMonTool if HLTMonFlags.doIDtrk: @@ -74,7 +76,7 @@ if HLTMonFlags.doMonTier0: from TrigIDtrkMonitoring.TrigIDtrkMonitoringConfig import TrigIDtrkMonitoringTool HLTMonManager.AthenaMonTools += TrigIDtrkMonitoringTool() except: - print "Problems with HLTIDtrkTool, tool not enabled" + log.info("Problems with HLTIDtrkTool, tool not enabled") # MET HLTMonTool if HLTMonFlags.doMET: @@ -82,7 +84,7 @@ if HLTMonFlags.doMonTier0: from TrigMETMonitoring.TrigMETMonitoringConfig import HLTMETMonitoringTool HLTMonManager.AthenaMonTools += HLTMETMonitoringTool() except: - print "Problems with HLTMETTool, tool not enabled" + log.info("Problems with HLTMETTool, tool not enabled") # Tau HLTMonTool if HLTMonFlags.doTau: @@ -90,31 +92,31 @@ if HLTMonFlags.doMonTier0: from TrigTauMonitoring.TrigTauMonitoringConfig import TrigTauMonitoringTool HLTMonManager.AthenaMonTools += TrigTauMonitoringTool() except: - print "Problems with HLTTauTool, tool not enabled" - + log.info("Problems with HLTTauTool, tool not enabled") + # Jet HLTMonTool if HLTMonFlags.doJet: try: from TrigJetMonitoring.TrigJetMonitoringConfig import TrigJetMonitoringTool 
HLTMonManager.AthenaMonTools += TrigJetMonitoringTool() except: - print "Problems with HLTJetTool, tool not enabled" - + log.info("Problems with HLTJetTool, tool not enabled") + # b-jet HLTMonTool if HLTMonFlags.doBjet and rec.doInDet: try: from TrigBjetMonitoring.TrigBjetMonitoringConfig import TrigBjetMonitoringConfig HLTMonManager.AthenaMonTools += TrigBjetMonitoringConfig() except: - print "Problems with HLTBjetTool, tool not enabled" - + log.info("Problems with HLTBjetTool, tool not enabled") + # B-phys HLTMonTool if HLTMonFlags.doBphys: try: from TrigBphysMonitoring.TrigBphysMonitoringConfig import TrigBphysMonitoringTool HLTMonManager.AthenaMonTools += TrigBphysMonitoringTool() except: - print "Problems with HLTBphysTool, tool not enabled" + log.info("Problems with HLTBphysTool, tool not enabled") # MinBias HLTMonTool if HLTMonFlags.doMinBias: @@ -122,7 +124,7 @@ if HLTMonFlags.doMonTier0: from TrigMinBiasMonitoring.TrigMinBiasMonitoringConfig import TrigMinBiasMonitoringTool HLTMonManager.AthenaMonTools += TrigMinBiasMonitoringTool() except: - print "Problems with HLTMinBiasDumpTool, tool not enabled" + log.info("Problems with HLTMinBiasDumpTool, tool not enabled") # MinBias HLTMonTool if HLTMonFlags.doIDJpsiMon: @@ -130,7 +132,7 @@ if HLTMonFlags.doMonTier0: from TrigIDJpsiMonitoring.TrigIDJpsiMonitoringConfig import TrigIDJpsiMonitoringTool HLTMonManager.AthenaMonTools += TrigIDJpsiMonitoringTool() except: - print "Problems with, TrigIDJpsiMonTool, tool not enabled" + log.info("Problems with TrigIDJpsiMonTool, tool not enabled") ################ Dump Tools ################ @@ -143,145 +145,159 @@ if HLTMonFlags.doDump: from TrigEgammaMonitoring.TrigEgammaMonitoringConfig import HLTEgammaMonitoringDumpTool HLTMonManager.AthenaMonTools += HLTEgammaMonitoringDumpTool() except: - print "Problems with HLTEgammaDumpTool, tool not enabled" + log.info("Problems with HLTEgammaDumpTool, tool not enabled") #Make the custom tau TTP ntuples. 
- if HLTMonFlags.doOfflineTauTTP : + if HLTMonFlags.doOfflineTauTTP : try: from TrigTauPerformAthena.TrigTauPerformAthenaConf import TrigTauPerformAthenaAlgo ttpalgo = TrigTauPerformAthenaAlgo() from AthenaCommon.AlgSequence import AlgSequence topSequence = AlgSequence() - topSequence += ttpalgo + topSequence += ttpalgo except: - print "Problems with OfflineTauTTP, tool not enabled" + log.info("Problems with OfflineTauTTP, tool not enabled") ############################################ - ########## Menu-aware Monitoring ########### -#HLTMonFlags.doMaM = False -if HLTMonFlags.doMaM: +#HLTMonFlags.doMaM = True +#HLTMonFlags.doMaM_ApplyMCK = True + +if HLTMonFlags.doMaM == True: # MaM needs to check whether it is running in a Trigger reprocessing job or not, and start an instance of MaM connected to the correct database accordingly trigger_reco_tf_job = False - + if hasattr(runArgs, "DBserver") and runArgs.DBserver == "TRIGGERDBREPR": - trigger_reco_tf_job = True - + trigger_reco_tf_job = True elif hasattr(runArgs, "triggerConfig") and "TRIGGERDBREPR" in runArgs.triggerConfig: - trigger_reco_tf_job = True - + trigger_reco_tf_job = True + from TrigHLTMonitoring.MenuAwareMonitoring import MenuAwareMonitoring if trigger_reco_tf_job: - mam = MenuAwareMonitoring("TRIGGERDBREPR") + log.info("Will attempt to doMaM with TRIGGERDBREPR") + mam = MenuAwareMonitoring("TRIGGERDBREPR_R") else: + log.info("Will attempt to doMaM with TRIGGERDB") mam = MenuAwareMonitoring() - - if mam.connected_to_oracle == False: - # how to raise this as an error? - print "Menu-aware Monitoring error: Cannot doMaM without database connection. Exiting MaM." 
- else: - - # if we are applying configurations to tools according to an MCK, then do that here - if HLTMonFlags.doMaM_ApplyMCK: - # if a specific Monitoring Configuration Key (MCK) has been set, then use it - if HLTMonFlags.MCK.StoredValue > 0: + if mam.ms.connected_to_oracle == False: + log.error("Menu-Aware Monitoring: Cannot doMaM without database connection") + else: + # if a specific Monitoring Configuration Key (MCK) has been set, then use it + if HLTMonFlags.MCK.StoredValue > 0: + if mam.ms.oi.check_if_mck_id_exists( HLTMonFlags.MCK.StoredValue ): if mam.does_mck_athena_version_match_current_athena_version( HLTMonFlags.MCK.StoredValue ): - print "Using trigger Monitoring Configuration Key (MCK)",HLTMonFlags.MCK.StoredValue - mam.apply_mck( HLTMonFlags.MCK.StoredValue ) - else: - print "MCK",HLTMonFlags.MCK.StoredValue,"cannot be applied as it is a patch for a different Athena version. No MCK applied." - - # if HLTMonFlags.MCK is -1 (the default) we try to determine the MCK automatically - if HLTMonFlags.MCK.StoredValue == -1: - - if trigger_reco_tf_job: - # for trigger repro jobs, need to check the transform arguments and get the SMK from there, then use the linked MCK. - # no Cool interaction in these jobs - SMKrepr = None - - if hasattr(runArgs, "DBsmkey") and runArgs.DBsmkey!="NONE": - SMKrepr = int(runArgs.DBsmkey) - - elif hasattr(runArgs, "triggerConfig") and runArgs.triggerConfig!="NONE": - SMKrepr = int(runArgs.triggerConfig.split(":")[-1].split(",")[0]) - - else: - print "Could not get SMK from DBsmkey or triggerConfig runArgs. No MCK applied." - - if SMKrepr is not None: - print "SMK SuperMasterKey from runArgs =",SMKrepr - # we now have the required input info. 
Use mam to get the appropriate MCK - MCKfromSMKrepr = mam.get_mck_id_from_smk(SMKrepr) - - # if the MCK is > 0 and is from the right release then apply it, otherwise use the default tool configurations - if MCKfromSMKrepr > 0: - if mam.does_mck_athena_version_match_current_athena_version( MCKfromSMKrepr ): - HLTMonFlags.MCK.StoredValue = MCKfromSMKrepr - print "Using trigger Monitoring Configuration Key (MCK) from SMK link =",HLTMonFlags.MCK.StoredValue - mam.apply_mck( HLTMonFlags.MCK.StoredValue ) - else: - print "MCK",MCKfromSMKrepr,"cannot be applied as it is a patch for a different Athena version. No MCK applied." - elif MCKfromSMKrepr == 0: + log.info("MCK found via transform %d" % HLTMonFlags.MCK.StoredValue ) + # if we are applying configurations to tools according to an MCK, then do that here + if HLTMonFlags.doMaM_ApplyMCK: + log.info("Applying MCK %d" % HLTMonFlags.MCK.StoredValue) + mam.apply_mck( HLTMonFlags.MCK.StoredValue ) + else: + log.error("MCK for a different release found via transform: %d" % HLTMonFlags.MCK.StoredValue ) + else: + log.error("MCK found via transform (%d) is not a vaid MCK." % HLTMonFlags.MCK.StoredValue ) + + # if HLTMonFlags.MCK is -1 (the default) we try to determine the MCK automatically, as long as this is not MC + is_not_sim = True + from RecExConfig.InputFilePeeker import inputFileSummary + if inputFileSummary.__contains__('evt_type'): + if 'IS_SIMULATION' in inputFileSummary['evt_type']: + log.info("Will not try to get MCK automatically as we are running on MC") + is_not_sim = False + + if HLTMonFlags.MCK.StoredValue == -1 and is_not_sim: + if trigger_reco_tf_job: + # for trigger repro jobs, need to check the transform arguments and get the SMK from there, then use the linked MCK. 
+ # no COOL interaction in these jobs + SMKrepr = None + + if hasattr(runArgs, "DBsmkey") and runArgs.DBsmkey!="NONE": + SMKrepr = int(runArgs.DBsmkey) + + elif hasattr(runArgs, "triggerConfig") and runArgs.triggerConfig!="NONE": + SMKrepr = int(runArgs.triggerConfig.split(":")[-1].split(",")[0]) + + else: + log.error("Menu-Aware Monitoring: Could not get SMK from DBsmkey or triggerConfig runArgs") + + if SMKrepr is not None: + log.info("SMK from runArgs %d" % SMKrepr) + # we now have the required input info. Use mam to get the appropriate MCK + MCKfromSMKrepr = mam.get_mck_id_from_smk(SMKrepr) + + # if the MCK is > 0 and is from the right release then apply it, otherwise use the default tool configurations + if MCKfromSMKrepr > 0: + if mam.does_mck_athena_version_match_current_athena_version( MCKfromSMKrepr ): HLTMonFlags.MCK.StoredValue = MCKfromSMKrepr - print "Monitoring Configuration Key (MCK) from SMK link = 0, default configuration used" - - """else: - - # try to get the MCK from Cool - from RecExConfig.InputFilePeeker import inputFileSummary - #print "inputFileSummary =",inputFileSummary + log.info("MCK found via SMK link %d" % HLTMonFlags.MCK.StoredValue) + if HLTMonFlags.doMaM_ApplyMCK: + log.info("Applying MCK %d" % HLTMonFlags.MCK.StoredValue) + mam.apply_mck( HLTMonFlags.MCK.StoredValue ) + else: + log.info("MCK for a different release found via SMK link: %d -> MCK ignored." 
% HLTMonFlags.MCK.StoredValue) + elif MCKfromSMKrepr == 0: + HLTMonFlags.MCK.StoredValue = MCKfromSMKrepr + log.info("MCK found via SMK link: 0 -> Default configuration used.") + + else: + # try to get the MCK from COOL + if inputFileSummary.__contains__('bs_metadata') or inputFileSummary.__contains__('run_number'): + # get the run number for the input if inputFileSummary.__contains__('bs_metadata'): - # get the run number and lumi_block for the input run_number = inputFileSummary['bs_metadata']['run_number'] - lumi_block = inputFileSummary['bs_metadata']['LumiBlock'] - from PyCool import cool - - pointintime = (int(run_number)<<32) + int(lumi_block) # start from lumiblock 0 (or 1?) - - # try to connect to the COOL database - from CoolConvUtilities.AtlCoolLib import indirectOpen - connstring = "COOLONL_TRIGGER/CONDBR2" # get the MCK from Cool - coolDB=indirectOpen(connstring,oracle='True') - if coolDB is None: - print "Unable to connect to",connstring,"to get MCK from Cool." - else: - # try to get the MCK out of COOL - foldername = 'MenuAwareMonConfigKey' - MCKfolder=coolDB.getFolder('/TRIGGER/HLT/' + foldername ) - release_tag = foldername + '-' + mam.current_athena_version - - # need to retrieve for the right release using tags using a try-except - try: - retrieved_obj=MCKfolder.findObject(pointintime,0,release_tag) - retrieved_payload=retrieved_obj.payload() - retrieved_format=retrieved_payload['MonConfigKey'] - MonitoringConfigurationKey = int(retrieved_format) - HLTMonFlags.MCK.StoredValue = MonitoringConfigurationKey - - if HLTMonFlags.MCK.StoredValue > 0: - print "Using trigger Monitoring Configuration Key (MCK) from Cool =",HLTMonFlags.MCK.StoredValue - mam.apply_mck( HLTMonFlags.MCK.StoredValue ) - elif HLTMonFlags.MCK.StoredValue == 0: - print "Monitoring Configuration Key (MCK) from Cool = 0, default configuration used" - - check_SMK = False - - except ( ObjectNotFound, TagNotFound ): - print "No MCK in Cool folder",foldername,"for 
release",mam.current_athena_version,", no MCK applied." - - coolDB.closeDatabase()""" - - - # if dumping the tool configurations (as a .json file) has been requested, then do that here - if HLTMonFlags.doMaM_ExtractAndDumpConfigs: + else: + run_number = int(inputFileSummary['run_number'][0]) + pointintime = (int(run_number)<<32) + + from PyCool import cool + # try to connect to the COOL database + from CoolConvUtilities.AtlCoolLib import indirectOpen + connstring = "COOLONL_TRIGGER/CONDBR2" # get the MCK from COOL + coolDB=indirectOpen(connstring,oracle='True') + if coolDB is None: + log.error("Menu-Aware Monitoring: Unable to connect to %s to get MCK from COOL." % connstring) + else: + # try to get the MCK out of COOL + foldername = 'MenuAwareMonConfigKey' + MCKfolder=coolDB.getFolder('/TRIGGER/HLT/' + foldername ) + release_tag = foldername + '-' + mam.ms.current_athena_version + + # need to retrieve for the right release using tags using a try-except + try: + retrieved_obj=MCKfolder.findObject(pointintime,0,release_tag) + retrieved_payload=retrieved_obj.payload() + retrieved_format=retrieved_payload['MonConfigKey'] + MonitoringConfigurationKey = int(retrieved_format) + HLTMonFlags.MCK.StoredValue = MonitoringConfigurationKey + coolDB.closeDatabase() + if HLTMonFlags.MCK.StoredValue == 0: + log.info("MCK 0 found in COOL folder %s for release %s -> No MCK applied." % (foldername,mam.ms.current_athena_version)) + except: + log.info("No MCK in COOL folder %s for release %s -> No MCK applied." 
% (foldername,mam.ms.current_athena_version)) + + if HLTMonFlags.MCK.StoredValue > 0: + if mam.ms.oi.check_if_mck_id_exists( HLTMonFlags.MCK.StoredValue ): + if mam.does_mck_athena_version_match_current_athena_version( HLTMonFlags.MCK.StoredValue ): + log.info("MCK found via COOL %d" % HLTMonFlags.MCK.StoredValue) + if HLTMonFlags.doMaM_ApplyMCK: + log.info("Applying MCK %d" % HLTMonFlags.MCK.StoredValue) + mam.apply_mck( HLTMonFlags.MCK.StoredValue ) + else: + log.error("MCK for a different release (%d) found in COOL folder (%s) with tag (%s)." % (HLTMonFlags.MCK.StoredValue,foldername,release_tag)) + else: + log.error("MCK found via COOL (%d) in COOL folder (%s) with tag (%s) is not a vaid MCK." % (HLTMonFlags.MCK.StoredValue,foldername,release_tag)) + else: + log.error("Menu-Aware Monitoring: Unable to get run number from metadata") + # if dumping the tool configurations (as a .json file) has been requested, then do that here + if HLTMonFlags.doMaM_ExtractAndDumpConfigs == True: + log.info("Will attempt to extract final trigger monitoring tool configurations and dump them to %s" % (HLTMonFlags.MaM_OutputJSON.StoredValue)) # get updated configs for all tools, and dump them to HLTMonFlags.MaM_OutputJSON.StoredValue mam.get_current_local_info() - mam.dump_local_config_to_json(HLTMonFlags.MaM_OutputJSON.StoredValue) + mam.make_default_json(HLTMonFlags.MaM_OutputJSON.StoredValue,comment="Final job config") ############################################ diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/src/IHLTMonTool.cxx b/Trigger/TrigMonitoring/TrigHLTMonitoring/src/IHLTMonTool.cxx index af1c67807f01..1abf08d43e35 100755 --- a/Trigger/TrigMonitoring/TrigHLTMonitoring/src/IHLTMonTool.cxx +++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/src/IHLTMonTool.cxx @@ -93,8 +93,13 @@ StatusCode IHLTMonTool::initialize() { ATH_MSG_INFO("No TrigConfigTool provided, using TrigConfigSvc (default)"); sc = m_configsvc.retrieve(); if ( sc.isFailure() ) { - ATH_MSG_ERROR("Could not 
retrieve Trigger Config Svc"); - return sc; + ATH_MSG_WARNING("Could not retrieve TrigConfigSvc - trying TrigConf::xAODConfigTool"); + m_configTool = ToolHandle<TrigConf::ITrigConfigTool>("TrigConf::xAODConfigTool"); + sc = m_configTool.retrieve(); + if ( sc.isFailure() ) { + ATH_MSG_ERROR("Could not retrieve TrigConfigTool"); + return sc; + } } } else { -- GitLab