diff --git a/Control/AthenaMonitoring/python/AthenaMonitoringCfg.py b/Control/AthenaMonitoring/python/AthenaMonitoringCfg.py index 87e3cf4f87108fb944b0fe95f9facefcd7442e1a..3706e49c96d9a55006bfc4d06edbd08274348032 100644 --- a/Control/AthenaMonitoring/python/AthenaMonitoringCfg.py +++ b/Control/AthenaMonitoring/python/AthenaMonitoringCfg.py @@ -65,9 +65,6 @@ def AthenaMonitoringCfg(flags): info('Set up HLT monitoring') if flags.DQ.Environment == 'AOD': warning("HLT Monitoring enabled on AOD: this might cause issues as not all HLT collections are written to standard AOD!") - if not flags.Input.isMC: - error("HLT monitoring on data AOD input is not allowed - not all HLT collections are recorded to AOD!") - raise ValueError("HLT monitoring enabled on data AOD input.") from TrigHLTMonitoring.TrigHLTMonitorAlgorithm import TrigHLTMonTopConfig result.merge(TrigHLTMonTopConfig(flags)) diff --git a/Database/ConnectionManagement/AtlasAuthentication/CMakeLists.txt b/Database/ConnectionManagement/AtlasAuthentication/CMakeLists.txt index 469542ac3be80f30c3d7a7788a94cc307d84db92..25aecbd9b10a76f6e17e8dbe9d6cca55d297c528 100644 --- a/Database/ConnectionManagement/AtlasAuthentication/CMakeLists.txt +++ b/Database/ConnectionManagement/AtlasAuthentication/CMakeLists.txt @@ -28,7 +28,7 @@ if( NOT ATLAS_EXTERNAL ) endif() # Pick up authentication.xml: -set( AtlasAuthentication_native_version "v21" ) +set( AtlasAuthentication_native_version "v22" ) set( AtlasAuthentication_home "${ATLAS_EXTERNAL}/AtlasAuth/${AtlasAuthentication_native_version}" ) if (EXISTS ${AtlasAuthentication_home}/authentication.xml ) diff --git a/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_2022_FullG4MT_QS_ttbar_MT.sh b/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_2022_FullG4MT_QS_ttbar_MT.sh new file mode 100755 index 0000000000000000000000000000000000000000..0bd22d4b008258ae3cb09eb6bc7c8f809c7f2e27 --- /dev/null +++ b/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_2022_FullG4MT_QS_ttbar_MT.sh @@ -0,0 +1,53 @@ +#!/bin/sh +# +# art-description: MC23-style RUN3 simulation using FullG4MT_QS in AthenaMT +# art-include: 24.0/Athena +# art-include: 24.0/AthSimulation +# art-include: main/Athena +# art-include: main/AthSimulation +# art-type: grid +# art-athena-mt: 8 +# art-architecture: '#x86_64-intel' +# art-output: test.*.HITS.pool.root +# art-output: log.* +# art-output: Config*.pkl + +export ATHENA_CORE_NUMBER=8 + +# RUN3 setup +# ATLAS-R3S-2021-03-02-00 and OFLCOND-MC23-SDR-RUN3-01 +Sim_tf.py \ + --CA \ + --multithreaded \ + --conditionsTag 'default:OFLCOND-MC23-SDR-RUN3-01' \ + --simulator 'FullG4MT_QS' \ + --postInclude 'PyJobTransforms.UseFrontier' \ + --preInclude 'EVNTtoHITS:Campaigns.MC23aSimulationMultipleIoV' \ + --geometryVersion 'default:ATLAS-R3S-2021-03-02-00' \ + --inputEVNTFile "/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/CampaignInputs/mc21/EVNT/mc21_13p6TeV.601229.PhPy8EG_A14_ttbar_hdamp258p75_SingleLep.evgen.EVNT.e8453/EVNT.29328277._003902.pool.root.1" \ + --outputHITSFile "test.CA.HITS.pool.root" \ + --maxEvents 50 \ + --jobNumber 1 \ + --postExec 'with open("ConfigSimCA.pkl", "wb") as f: cfg.store(f)' \ + --imf False + +rc=$? +mv log.EVNTtoHITS log.EVNTtoHITS.CA +echo "art-result: $rc simCA" +status=$rc + +rc2=-9999 +if [ $rc -eq 0 ] +then + ArtPackage=$1 + ArtJobName=$2 + art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} --order-trees --mode=semi-detailed --diff-root --file=test.CA.HITS.pool.root + rc2=$? 
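+    # Report the comparison result separately, but keep the first failure:
+    # rc2 replaces the overall status only if the simulation step succeeded.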
+ if [ $status -eq 0 ] + then + status=$rc2 + fi +fi +echo "art-result: $rc2 regression" + +exit $status diff --git a/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_ATLFAST3MT_QS_ttbar_MT.sh b/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_ATLFAST3MT_QS_ttbar_MT.sh index e972669a3be164f5e7974aa0e51c6b3480628c0f..2a9b243902d2c9b8e5bf68f880131ef5c8505829 100755 --- a/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_ATLFAST3MT_QS_ttbar_MT.sh +++ b/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_ATLFAST3MT_QS_ttbar_MT.sh @@ -14,14 +14,14 @@ export ATHENA_CORE_NUMBER=8 # RUN3 setup -# ATLAS-R3S-2021-03-02-00 and OFLCOND-MC23-SDR-RUN3-01 +# ATLAS-R3S-2021-03-02-00 and OFLCOND-MC23-SDR-RUN3-04 Sim_tf.py \ --CA \ --multithreaded \ - --conditionsTag 'default:OFLCOND-MC23-SDR-RUN3-01' \ + --conditionsTag 'default:OFLCOND-MC23-SDR-RUN3-04' \ --simulator 'ATLFAST3MT_QS' \ --postInclude 'PyJobTransforms.UseFrontier' \ - --preInclude 'EVNTtoHITS:Campaigns.MC23aSimulationMultipleIoV' \ + --preInclude 'EVNTtoHITS:Campaigns.MC23eSimulationMultipleIoV' \ --geometryVersion 'default:ATLAS-R3S-2021-03-02-00' \ --inputEVNTFile "/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/CampaignInputs/mc21/EVNT/mc21_13p6TeV.601229.PhPy8EG_A14_ttbar_hdamp258p75_SingleLep.evgen.EVNT.e8453/EVNT.29328277._003902.pool.root.1" \ --outputHITSFile "test.CA.HITS.pool.root" \ diff --git a/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_ATLFAST3MT_ttbar_MT.sh b/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_ATLFAST3MT_ttbar_MT.sh index cffda8578ec0fcca0a5f02533b706c44751c999d..77543c7f7b0d5f443b389694abb001689bee9a86 100755 --- a/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_ATLFAST3MT_ttbar_MT.sh +++ b/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_ATLFAST3MT_ttbar_MT.sh @@ -13,14 +13,14 @@ export ATHENA_CORE_NUMBER=8 # RUN3 setup -# ATLAS-R3S-2021-03-02-00 and OFLCOND-MC23-SDR-RUN3-01 +# ATLAS-R3S-2021-03-02-00 and OFLCOND-MC23-SDR-RUN3-04 Sim_tf.py \ --CA \ --multithreaded \ - --conditionsTag 'default:OFLCOND-MC23-SDR-RUN3-01' \ + --conditionsTag 'default:OFLCOND-MC23-SDR-RUN3-04' \ --simulator 'ATLFAST3MT' \ --postInclude 'PyJobTransforms.UseFrontier' \ - --preInclude 'EVNTtoHITS:Campaigns.MC23aSimulationMultipleIoV' \ + --preInclude 'EVNTtoHITS:Campaigns.MC23eSimulationMultipleIoV' \ --geometryVersion 'default:ATLAS-R3S-2021-03-02-00' \ --inputEVNTFile "/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/CampaignInputs/mc21/EVNT/mc21_13p6TeV.601229.PhPy8EG_A14_ttbar_hdamp258p75_SingleLep.evgen.EVNT.e8453/EVNT.29328277._003902.pool.root.1" \ --outputHITSFile "test.CA.HITS.pool.root" \ diff --git a/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_Monopoles_MT.sh b/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_Monopoles_MT.sh index ac3fb6943e90089c21a42529cffc3cac88028a59..58d786c07a6d0d8b19530fd193f4269fdf5367d2 100755 --- a/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_Monopoles_MT.sh +++ b/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_Monopoles_MT.sh @@ -23,10 +23,10 @@ Sim_tf.py \ --inputEVNTFile="/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/ISF_Validation/mc21_13p6TeV.950542.MGPy8EG_DYSpinHalfMonopoles_1gD_1500GeV_valid.merge.EVNT.e8467_e8455.29631249._000005.pool.root.1" \ --maxEvents="100" \ --multithreaded="True" \ - --preInclude "EVNTtoHITS:Campaigns.MC23aSimulationMultipleIoV,G4DebuggingTools.DebugMonopole" \ + --preInclude "EVNTtoHITS:Campaigns.MC23eSimulationMultipleIoV,G4DebuggingTools.DebugMonopole" \ --skipEvents="0" \ --randomSeed="41" \ - 
--conditionsTag "default:OFLCOND-MC23-SDR-RUN3-01" \ + --conditionsTag "default:OFLCOND-MC23-SDR-RUN3-04" \ --geometryVersion="default:ATLAS-R3S-2021-03-02-00" \ --runNumber="950542" \ --AMITag="s3890" \ diff --git a/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_Smuons_MT.sh b/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_Smuons_MT.sh index 6d52732eca87c56e08bcedda6df0dba69b4cf47d..99ee5d659e25268fab6f0fa0e05b846374a52d22 100755 --- a/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_Smuons_MT.sh +++ b/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_Smuons_MT.sh @@ -17,16 +17,16 @@ export ATHENA_PROC_NUMBER=8 export ATHENA_CORE_NUMBER=8 # RUN3 setup -# ATLAS-R3S-2021-03-02-00 and OFLCOND-MC23-SDR-RUN3-01 +# ATLAS-R3S-2021-03-02-00 and OFLCOND-MC23-SDR-RUN3-04 Sim_tf.py \ --CA \ --inputEVNTFile="/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/ISF_Validation/mc21_13p6TeV.temp.DisplacedSmuonPairProduction.10events.pool.root" \ --maxEvents="10" \ --multithreaded="True" \ - --preInclude "EVNTtoHITS:Campaigns.MC23aSimulationMultipleIoV,G4DebuggingTools.DebugSleptonsLLP" \ + --preInclude "EVNTtoHITS:Campaigns.MC23eSimulationMultipleIoV,G4DebuggingTools.DebugSleptonsLLP" \ --skipEvents="0" \ --randomSeed="41" \ - --conditionsTag "default:OFLCOND-MC23-SDR-RUN3-01" \ + --conditionsTag "default:OFLCOND-MC23-SDR-RUN3-04" \ --geometryVersion="default:ATLAS-R3S-2021-03-02-00" \ --runNumber="950542" \ --AMITag="s3890" \ diff --git a/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_ZPrime_MT.sh b/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_ZPrime_MT.sh index 5cb2cfb639cc664add4632f877838a7a9c336b87..09cde8a8acafdaee71f7c755102899708daea411 100755 --- a/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_ZPrime_MT.sh +++ b/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_ZPrime_MT.sh @@ -18,14 +18,14 @@ export ATHENA_CORE_NUMBER=8 # RUN3 setup -# ATLAS-R3S-2021-03-02-00 and OFLCOND-MC23-SDR-RUN3-01 +# ATLAS-R3S-2021-03-02-00 and OFLCOND-MC23-SDR-RUN3-04 Sim_tf.py \ --CA \ --multithreaded \ - --conditionsTag 'default:OFLCOND-MC23-SDR-RUN3-01' \ + --conditionsTag 'default:OFLCOND-MC23-SDR-RUN3-04' \ --simulator 'FullG4MT_QS' \ --postInclude 'PyJobTransforms.UseFrontier' \ - --preInclude 'EVNTtoHITS:Campaigns.MC23aSimulationMultipleIoV' \ + --preInclude 'EVNTtoHITS:Campaigns.MC23eSimulationMultipleIoV' \ --geometryVersion 'default:ATLAS-R3S-2021-03-02-00' \ --inputEVNTFile '/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/ISF_Validation/mc23_13p6TeV.800030.Py8EG_A14NNPDF23LO_flatpT_Zprime_Extended.evgen.EVNT.e8514.33116368._004446.pool.root.1' \ --outputHITSFile 'test.CA.HITS.pool.root' \ diff --git a/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_ttbar_MPvsST.sh b/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_ttbar_MPvsST.sh index d9bc06bf255c66c1589031be9a9b111854f1e3aa..8b523cf8e5b0da4b1f3d4c0892704d622cf7b125 100755 --- a/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_ttbar_MPvsST.sh +++ b/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_ttbar_MPvsST.sh @@ -1,6 +1,6 @@ #!/bin/sh # -# art-description: Run MP and ST simulation, reading ttbar events, writing HITS, using MC23a geometry and conditions +# art-description: Run MP and ST simulation, reading ttbar events, writing HITS, using MC23e geometry and conditions # art-include: 23.0/Athena # art-include: 23.0/AthSimulation # art-include: 24.0/Athena @@ -24,7 +24,7 @@ Sim_tf.py \ --outputHITSFile 
"test.MP.HITS.pool.root" \ --maxEvents 50 \ --geometryVersion 'default:ATLAS-R3S-2021-03-02-00' \ - --conditionsTag 'default:OFLCOND-MC23-SDR-RUN3-01' \ + --conditionsTag 'default:OFLCOND-MC23-SDR-RUN3-04' \ --simulator 'FullG4MT_QS' \ --postInclude 'default:PyJobTransforms.UseFrontier' \ --preInclude 'EVNTtoHITS:Campaigns.MC23SimulationSingleIoV' \ @@ -44,7 +44,7 @@ Sim_tf.py \ --outputHITSFile "temp.ST.HITS.pool.root" \ --maxEvents 50 \ --geometryVersion 'default:ATLAS-R3S-2021-03-02-00' \ - --conditionsTag 'default:OFLCOND-MC23-SDR-RUN3-01' \ + --conditionsTag 'default:OFLCOND-MC23-SDR-RUN3-04' \ --simulator 'FullG4MT_QS' \ --postInclude 'default:PyJobTransforms.UseFrontier' \ --preInclude 'EVNTtoHITS:Campaigns.MC23SimulationSingleIoV' \ diff --git a/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_ttbar_MT.sh b/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_ttbar_MT.sh index 679195d9aa939379c012f1a254736ccc5f1918f2..7f8422bbd89471318a7bcaa9653e014d409fc0a4 100755 --- a/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_ttbar_MT.sh +++ b/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_ttbar_MT.sh @@ -17,14 +17,14 @@ export ATHENA_CORE_NUMBER=8 # RUN3 setup -# ATLAS-R3S-2021-03-02-00 and OFLCOND-MC23-SDR-RUN3-01 +# ATLAS-R3S-2021-03-02-00 and OFLCOND-MC23-SDR-RUN3-04 Sim_tf.py \ --CA \ --multithreaded \ - --conditionsTag 'default:OFLCOND-MC23-SDR-RUN3-01' \ + --conditionsTag 'default:OFLCOND-MC23-SDR-RUN3-04' \ --simulator 'FullG4MT_QS' \ --postInclude 'PyJobTransforms.UseFrontier' \ - --preInclude 'EVNTtoHITS:Campaigns.MC23aSimulationMultipleIoV' \ + --preInclude 'EVNTtoHITS:Campaigns.MC23eSimulationMultipleIoV' \ --geometryVersion 'default:ATLAS-R3S-2021-03-02-00' \ --inputEVNTFile "/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/CampaignInputs/mc21/EVNT/mc21_13p6TeV.601229.PhPy8EG_A14_ttbar_hdamp258p75_SingleLep.evgen.EVNT.e8453/EVNT.29328277._003902.pool.root.1" \ --outputHITSFile "test.CA.HITS.pool.root" \ diff --git a/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_ttbar_MTvsST.sh b/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_ttbar_MTvsST.sh index dd0cbc0bf9c1f9138c7e6793d0d5bfdb64d92a3d..46e6d483e598872db8910c3dfd90a72863006cbd 100755 --- a/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_ttbar_MTvsST.sh +++ b/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_QS_ttbar_MTvsST.sh @@ -1,6 +1,6 @@ #!/bin/sh # -# art-description: Run MT and ST simulation, reading ttbar events, writing HITS, using MC23a geometry and conditions +# art-description: Run MT and ST simulation, reading ttbar events, writing HITS, using MC23e geometry and conditions # art-include: 23.0/Athena # art-include: 23.0/AthSimulation # art-include: 24.0/Athena @@ -24,10 +24,10 @@ Sim_tf.py \ --outputHITSFile "test.MT.HITS.pool.root" \ --maxEvents 50 \ --geometryVersion 'default:ATLAS-R3S-2021-03-02-00' \ - --conditionsTag 'default:OFLCOND-MC23-SDR-RUN3-01' \ + --conditionsTag 'default:OFLCOND-MC23-SDR-RUN3-04' \ --simulator 'FullG4MT_QS' \ --postInclude 'default:PyJobTransforms.UseFrontier' \ - --preInclude 'EVNTtoHITS:Campaigns.MC23aSimulationMultipleIoV' \ + --preInclude 'EVNTtoHITS:Campaigns.MC23eSimulationMultipleIoV' \ --jobNumber 1 \ --postExec 'with open("ConfigSimMT.pkl", "wb") as f: cfg.store(f)' \ --imf False @@ -45,10 +45,10 @@ Sim_tf.py \ --outputHITSFile "test.ST.HITS.pool.root" \ --maxEvents 50 \ --geometryVersion 'default:ATLAS-R3S-2021-03-02-00' \ - --conditionsTag 'default:OFLCOND-MC23-SDR-RUN3-01' \ + 
--conditionsTag 'default:OFLCOND-MC23-SDR-RUN3-04' \
 --simulator 'FullG4MT_QS' \
 --postInclude 'default:PyJobTransforms.UseFrontier' \
- --preInclude 'EVNTtoHITS:Campaigns.MC23aSimulationMultipleIoV' \
+ --preInclude 'EVNTtoHITS:Campaigns.MC23eSimulationMultipleIoV' \
 --jobNumber 1 \
 --postExec 'with open("ConfigSimST.pkl", "wb") as f: cfg.store(f)' \
 --imf False
diff --git a/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_ttbar_MT.sh b/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_ttbar_MT.sh
index 24161f66d4362ddc8082d662cee6abbb71b533b4..1a2d192baeb98cb0c8d4f3c460dea2e182808162 100755
--- a/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_ttbar_MT.sh
+++ b/Simulation/Tests/ISF_ValidationMT/test/test_RUN3_FullG4MT_ttbar_MT.sh
@@ -15,14 +15,14 @@ export ATHENA_CORE_NUMBER=8
 
 # RUN3 setup
-# ATLAS-R3S-2021-03-02-00 and OFLCOND-MC23-SDR-RUN3-01
+# ATLAS-R3S-2021-03-02-00 and OFLCOND-MC23-SDR-RUN3-04
 Sim_tf.py \
 --CA \
 --multithreaded \
- --conditionsTag 'default:OFLCOND-MC23-SDR-RUN3-01' \
+ --conditionsTag 'default:OFLCOND-MC23-SDR-RUN3-04' \
 --simulator 'FullG4MT' \
 --postInclude 'PyJobTransforms.UseFrontier' \
- --preInclude 'EVNTtoHITS:Campaigns.MC23aSimulationMultipleIoV' \
+ --preInclude 'EVNTtoHITS:Campaigns.MC23eSimulationMultipleIoV' \
 --geometryVersion 'default:ATLAS-R3S-2021-03-02-00' \
 --inputEVNTFile "/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/CampaignInputs/mc21/EVNT/mc21_13p6TeV.601229.PhPy8EG_A14_ttbar_hdamp258p75_SingleLep.evgen.EVNT.e8453/EVNT.29328277._003902.pool.root.1" \
 --outputHITSFile "test.CA.HITS.pool.root" \
diff --git a/TileCalorimeter/TileMonitoring/CMakeLists.txt b/TileCalorimeter/TileMonitoring/CMakeLists.txt
index 4614de9887add6d961a016f86ee0483595818ce1..1056e141472d423c08a4661b7652f7f84ad3f92c 100644
--- a/TileCalorimeter/TileMonitoring/CMakeLists.txt
+++ b/TileCalorimeter/TileMonitoring/CMakeLists.txt
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2022 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
 
 # Declare the package name:
 atlas_subdir( TileMonitoring )
@@ -138,3 +138,8 @@ atlas_add_test( TileTBCellMonitorAlgorithm_test
                 SCRIPT python -m TileMonitoring.TileTBCellMonitorAlgorithm
                 PROPERTIES TIMEOUT 600
                 POST_EXEC_SCRIPT nopost.sh)
+
+atlas_add_test( RunTileTBMonitoring_test
+                SCRIPT python -m TileMonitoring.RunTileTBMonitoring --use-sqlite ''
+                PROPERTIES TIMEOUT 600
+                POST_EXEC_SCRIPT nopost.sh)
diff --git a/TileCalorimeter/TileMonitoring/python/RunTileTBMonitoring.py b/TileCalorimeter/TileMonitoring/python/RunTileTBMonitoring.py
new file mode 100644
index 0000000000000000000000000000000000000000..c1bb97d5f71cec068bd749dabf33d5bdc9ff55b0
--- /dev/null
+++ b/TileCalorimeter/TileMonitoring/python/RunTileTBMonitoring.py
@@ -0,0 +1,321 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
+#
+'''
+@file RunTileTBMonitoring.py
+@brief Script to run Tile TestBeam Reconstruction/Monitoring
+'''
+
+from AthenaConfiguration.ComponentAccumulator import ComponentAccumulator
+from AthenaConfiguration.Enums import Format, BeamType
+from AthenaConfiguration.AutoConfigFlags import GetFileMD
+from TileConfiguration.TileConfigFlags import TileRunType
+
+import os
+import sys
+
+
+def configureFlagsAndArgsFromPartition(flags, args, partition, log):
+    """
+    Configure flags and args from the partition when running online.
+
+    The following flags are configured from the partition:
+    run type, run number, beam type, beam energy, project
+
+    The following args are configured from the partition: args.nsamples
+    """
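+
+    # ipc and ispy are TDAQ online software bindings; they are imported
+    # inside this function so that the script remains usable offline,
+    # where the TDAQ python modules may not be available.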
+    from ipc import IPCPartition
+    from ispy import ISObject
+    ipcPartition = IPCPartition(partition)
+    if not ipcPartition.isValid():
+        log.error('Partition: ' + ipcPartition.name() + ' is not valid')
+        sys.exit(1)
+
+    # Set up default values
+    runType = 'Physics'
+    beamType = 'collisions'
+    beamEnergy = 200  # In TileTB: [GeV]
+    runNumber = 2400000
+    project = 'data_H8'
+
+    try:
+        runParams = ISObject(ipcPartition, 'RunParams.SOR_RunParams', 'RunParams')
+    except Exception:
+        log.warning(f'No Run Parameters in IS => Set defaults: partition: {partition}, beam type: {beamType}'
+                    + f', beam energy: {beamEnergy}, run number: {runNumber}, project tag: {project}')
+    else:
+        runParams.checkout()
+        beamType = runParams.beam_type
+        beamEnergy = runParams.beam_energy
+        runNumber = runParams.run_number
+        project = runParams.T0_project_tag
+        runType = runParams.run_type
+        log.info(f'RUN CONFIGURATION: run type: {runType}, beam type: {beamType}'
+                 + f', beam energy: {beamEnergy}, run number: {runNumber}, project: {project}')
+
+    try:
+        cisParams = ISObject(ipcPartition, 'TileParams.cispar', 'TileCISparameters')
+    except Exception:
+        log.info('Could not find Tile Parameters in IS')
+    else:
+        try:
+            cisParams.checkout()
+        except Exception:
+            log.info("Could not get Tile Parameters from IS")
+        else:
+            log.info(f'TILE CONFIGURATION: CISPAR size: {len(cisParams.data)}')
+            cispar = 'TILE CONFIGURATION: CISPAR: '
+            for d in cisParams.data:
+                cispar += ' ' + str(d)
+            log.info(cispar)
+
+            if len(cisParams.data) == 16:
+                data = cisParams.data
+                if data[12] == 1:
+                    runType = 'Physics'
+                elif data[12] == 2:
+                    runType = 'Laser'
+                elif data[12] == 4:
+                    runType = 'Pedestals'
+                elif data[12] == 8:
+                    runType = 'CIS'
+
+                log.info(f'TILE CONFIGURATION: RunType: {runType}, Mode: {data[0]}, Samples: {data[1]}, Pipeline: {data[2]}'
+                         + f', I3Delay: {data[3]}, Event: {data[4]}, Phase: {data[5]}, DAC: {data[6]}, Capacity: {data[7]}')
+
+    # Try to get number of samples from partition
+    nSamples = 15  # Default number of samples
+    try:
+        dspConfig = ISObject(ipcPartition, 'TileParams.TileCal_DSPConfig', 'TileCal_IS_DSPConfig')
+    except Exception:
+        log.info(f"Could not find Tile DSP Config in IS => set default number of samples to {nSamples}")
+    else:
+        try:
+            dspConfig.checkout()
+        except Exception:
+            log.info(f"Could not get Tile DSP Config from IS => set default number of samples to {nSamples}")
+        else:
+            nSamples = dspConfig.samples
+            log.info(f"Set number of samples from DSP Config in IS: {nSamples}")
+
+    if 'Physics' in runType:
+        flags.Tile.RunType = TileRunType.PHY
+    elif 'CIS' in runType:
+        flags.Tile.RunType = TileRunType.MONOCIS if 'mono' in runType else TileRunType.CIS
+    elif 'Laser' in runType:
+        flags.Tile.RunType = TileRunType.LAS
+    elif 'Pedestals' in runType:
+        flags.Tile.RunType = TileRunType.PED
+
+    flags.Beam.Type = BeamType(beamType)
+    flags.Beam.Energy = beamEnergy
+    flags.Input.ProjectName = project
+    flags.Input.RunNumbers = [runNumber]
+    args.nsamples = nSamples
+
+
+def TileTestBeamMonitoringCfg(flags, fragIDs=[0x100, 0x101, 0x200, 0x201, 0x402], **kwargs):
+
+    ''' Function to configure Tile TestBeam monitoring.'''
+
+    acc = ComponentAccumulator()
+
+    from TileMonitoring.TileTBBeamMonitorAlgorithm import TileTBBeamMonitoringConfig
+    acc.merge(TileTBBeamMonitoringConfig(flags, fragIDs=fragIDs))
+
+    from TileMonitoring.TileTBMonitorAlgorithm import TileTBMonitoringConfig
+    acc.merge(TileTBMonitoringConfig(flags, fragIDs=fragIDs))
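+
+    # The pulse and cell monitors below use an explicit +-200 ns time window;
+    # the Flx monitors look at the containers produced by the FELIX
+    # (demonstrator) reconstruction that is configured in __main__ below.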
+    from TileMonitoring.TileTBPulseMonitorAlgorithm import TileTBPulseMonitoringConfig
+    acc.merge(TileTBPulseMonitoringConfig(flags, timeRange=[-200, 200], fragIDs=fragIDs))
+
+    from TileMonitoring.TileTBCellMonitorAlgorithm import TileTBCellMonitoringConfig
+    acc.merge(TileTBCellMonitoringConfig(flags, timeRange=[-200, 200], fragIDs=fragIDs))
+
+    from TileMonitoring.TileDigitsFlxMonitorAlgorithm import TileDigitsFlxMonitoringConfig
+    acc.merge(TileDigitsFlxMonitoringConfig(flags, TileDigitsContainerFlx="TileDigitsFlxFiltered"))
+
+    from TileMonitoring.TileRawChannelFlxMonitorAlgorithm import TileRawChannelFlxMonitoringConfig
+    acc.merge(TileRawChannelFlxMonitoringConfig(flags, TileRawChannelContainerFlx="TileRawChannelFlxFit"))
+
+    return acc
+
+
+if __name__ == '__main__':
+
+    # Setup logs
+    from AthenaCommon.Logging import log
+    from AthenaCommon.Constants import INFO
+    log.setLevel(INFO)
+
+    # Set the Athena configuration flags
+    from AthenaConfiguration.AllConfigFlags import initConfigFlags
+    from AthenaConfiguration.TestDefaults import defaultTestFiles
+
+    flags = initConfigFlags()
+    parser = flags.getArgumentParser()
+    parser.add_argument('--preExec', help='Code to execute before locking configs')
+    parser.add_argument('--postExec', help='Code to execute after setup')
+    parser.add_argument('--printConfig', action='store_true', help='Print detailed Athena configuration')
+    parser.add_argument('--dumpArguments', action='store_true', help='Print arguments and exit')
+    parser.add_argument('--frag-ids', dest='fragIDs', nargs="*", default=['0x100', '0x101', '0x200', '0x201', '0x402'],
+                        help='Tile Frag IDs of modules to be monitored. Empty=ALL')
+    parser.add_argument('--demo-cabling', dest='demoCabling', type=int, default=2018, help='Tile Demonstrator cabling to be used')
+    parser.add_argument('--nsamples', type=int, default=15, help='Number of samples')
+    parser.add_argument('--use-sqlite', dest='useSqlite', default='/afs/cern.ch/user/t/tiledemo/public/efmon/condb/tileSqlite.db',
+                        help='If a local SQLite file is provided, conditions constants will be read from it')
+
+    parser.add_argument('--stateless', action="store_true", help='Run Online Tile TB monitoring in partition')
+    parser.add_argument('--partition', default="", help='EMON, Partition name, default taken from $TDAQ_PARTITION if not set')
+    parser.add_argument('--key', type=str, default='ReadoutApplication', help='EMON, Selection key, e.g.: ReadoutApplication (TileTB)')
+    parser.add_argument('--keyValue', default=['TileREB-ROS'], help='EMON, Key values, e.g.: TileREB-ROS (TileTB)')
+    parser.add_argument('--keyCount', type=int, default=0, help='EMON, key count, e.g. 5 to get five random SFIs')
+    parser.add_argument('--publishName', default='TilePT-stateless-tb', help='EMON, Name under which to publish histograms')
+    parser.add_argument('--include', default="", help='EMON, Regular expression to select histograms to publish')
+    parser.add_argument('--lvl1Items', default=[], help='EMON, A list of L1 bit numbers, default []')
+    parser.add_argument('--lvl1Names', default=[], help='EMON, A list of L1 bit names, default []')
+    parser.add_argument('--lvl1Logic', default='Ignore', choices=['And','Or','Ignore'], help='EMON, default: Ignore')
+    parser.add_argument('--lvl1Origin', default='TAV', choices=['TBP','TAP','TAV'], help='EMON, default: TAV')
+    parser.add_argument('--streamType', default='physics', help='EMON, HLT stream type (e.g. physics or calibration)')
+    parser.add_argument('--streamNames', default=['tile'], help='EMON, List of HLT stream names')
+    parser.add_argument('--streamLogic', default='Ignore', choices=['And','Or','Ignore'], help='EMON, default: Ignore')
+    parser.add_argument('--triggerType', type=int, default=256, help='EMON, LVL1 8 bit trigger type, default: 256')
+    parser.add_argument('--groupName', default="TileTBMon", help='EMON, Name of the monitoring group')
+
+    update_group = parser.add_mutually_exclusive_group()
+    update_group.add_argument('--frequency', type=int, default=0, help='EMON, Frequency (in number of events) of publishing histograms')
+    update_group.add_argument('--updatePeriod', type=int, default=30, help='EMON, Frequency (in seconds) of publishing histograms')
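+
+    # The EMON options above are forwarded unchanged to the properties of the
+    # emon ByteStreamInputSvc when running with --stateless (see the
+    # bsEmonInputSvc block below); in offline mode they are ignored.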
+
+    args, _ = parser.parse_known_args()
+
+    if args.dumpArguments:
+        log.info('=====>>> FINAL ARGUMENTS FOLLOW')
+        print('{:40} : {}'.format('Argument Name', 'Value'))
+        for a, v in vars(args).items():
+            print(f'{a:40} : {v}')
+        sys.exit(0)
+
+    fragIDs = [int(fragID, base=16) for fragID in args.fragIDs]
+
+    # Initially the following flags are not set up (they must be provided)
+    flags.Input.Files = []
+
+    # Initial configuration flags from command line arguments (to be used to set up defaults)
+    flags.fillFromArgs(parser=parser)
+
+    # =======>>> Set the Athena configuration flags to defaults (can be overridden via command line)
+    flags.DQ.useTrigger = False
+    flags.DQ.enableLumiAccess = False
+    flags.Exec.MaxEvents = 3
+    flags.Common.isOnline = True
+    flags.GeoModel.AtlasVersion = 'ATLAS-R2-2015-04-00-00'
+
+    flags.Tile.doFit = True
+    flags.Tile.useDCS = False
+    flags.Tile.NoiseFilter = 0
+    flags.Tile.correctTime = False
+    flags.Tile.correctTimeJumps = False
+    flags.Tile.BestPhaseFromCOOL = False
+    flags.Tile.doOverflowFit = False
+
+    flags.Exec.PrintAlgsSequence = True
+
+    if args.stateless:
+        flags.Input.isMC = False
+        flags.Input.Format = Format.BS
+        partition = args.partition if args.partition else os.getenv('TDAQ_PARTITION', 'TileTB')
+        configureFlagsAndArgsFromPartition(flags, args, partition, log)
+    else:
+        flags.Tile.RunType = TileRunType.PHY
+        flags.Beam.Type = BeamType.Collisions
+        # Get beam energy from meta data (Tile TB setup: [GeV])
+        flags.Beam.Energy = GetFileMD(flags.Input.Files).get("beam_energy", 100)
+
+        if not (args.filesInput or flags.Input.Files):
+            flags.Input.Files = defaultTestFiles.RAW_RUN2
+
+    # =======>>> Override default configuration flags from command line arguments
+    flags.fillFromArgs(parser=parser)
+
+    if not flags.Output.HISTFileName:
+        runNumber = flags.Input.RunNumbers[0]
+        flags.Output.HISTFileName = f'tiletbmon_{runNumber}.root'
+
+    if args.preExec:
+        log.info('Executing preExec: %s', args.preExec)
+        exec(args.preExec)
+
+    flags.lock()
+
+    log.info('=====>>> FINAL CONFIG FLAGS SETTINGS FOLLOW:')
+    flags.dump(pattern='Tile.*|Input.*|Exec.*|IOVDb.[DG].*', evaluate=True)
+
+    # =======>>> Initialize configuration object, add accumulator, merge, and run
+    from AthenaConfiguration.MainServicesConfig import MainServicesCfg
+    cfg = MainServicesCfg(flags)
+
+    # =======>>> Configure Tile raw data (digits) reading
+    from TileByteStream.TileByteStreamConfig import TileRawDataReadingCfg
+    cfg.merge( TileRawDataReadingCfg(flags, readMuRcv=False,
+                                     readDigits=True,
+                                     readRawChannel=True,
+                                     readDigitsFlx=True,
+                                     readBeamElem=True,
+                                     stateless=args.stateless) )
+
+    if args.stateless:
+        bsEmonInputSvc = cfg.getService('ByteStreamInputSvc')
+        # Use the resolved partition name (it may come from $TDAQ_PARTITION)
+        bsEmonInputSvc.Partition = partition
+        bsEmonInputSvc.Key = 
args.key + bsEmonInputSvc.KeyValue = args.keyValue + bsEmonInputSvc.KeyCount = args.keyCount + bsEmonInputSvc.PublishName = args.publishName + bsEmonInputSvc.ISServer = 'Histogramming' + bsEmonInputSvc.UpdatePeriod = args.updatePeriod + bsEmonInputSvc.Frequency = args.frequency + bsEmonInputSvc.LVL1Items = args.lvl1Items + bsEmonInputSvc.LVL1Names = args.lvl1Names + bsEmonInputSvc.LVL1Logic = args.lvl1Logic + bsEmonInputSvc.LVL1Origin = args.lvl1Origin + bsEmonInputSvc.StreamType = args.streamType + bsEmonInputSvc.StreamNames = args.streamNames + bsEmonInputSvc.StreamLogic = args.streamLogic + bsEmonInputSvc.GroupName = args.groupName + bsEmonInputSvc.ProcessCorruptedEvents = True + bsEmonInputSvc.BufferSize = 2000 + + # =======>>> Configure reconstruction of Tile TestBeam data + from TileTBRec.TileTestBeamRecoConfig import TileTestBeamRecoCfg + cfg.merge( TileTestBeamRecoCfg(flags, useDemoCabling=args.demoCabling, nsamples=args.nsamples) ) + cfg.merge( TileTestBeamRecoCfg(flags, useDemoCabling=args.demoCabling, nsamples=16, useFELIX=True) ) + + if args.useSqlite: + cfg.getService('IOVDbSvc').overrideTags += [ + f'<prefix>/TILE</prefix> <db>sqlite://;schema={args.useSqlite};dbname={flags.IOVDb.DatabaseInstance}</db>', + # ROD folder does not exist in Sqlite file at the moment (should be added) + f'<prefix>/TILE/ONL01/STATUS/ROD</prefix> <db>COOLONL_TILE/{flags.IOVDb.DatabaseInstance}</db>' + ] + + # =======>>> Configure Tile TestBeam monitoring + cfg.merge(TileTestBeamMonitoringCfg(flags, fragIDs=fragIDs)) + + # =======>>> Configure ROD to ROB mapping + # Scan first event for all fragments to create proper ROD to ROB map + cfg.getCondAlgo('TileHid2RESrcIDCondAlg').RODStatusProxy = None + + # =======>>> Any last things to do? + if args.postExec: + log.info('Executing postExec: %s', args.postExec) + exec(args.postExec) + + if args.printConfig: + cfg.printConfig(withDetails=True, summariseProps=True, printDefaults=True) + + if args.config_only: + cfg.store(open('TileTestBeamMonitoring.pkl', 'wb')) + else: + sc = cfg.run() + # Success should be 0 + sys.exit(not sc.isSuccess()) diff --git a/Tools/Tier0ChainTests/CMakeLists.txt b/Tools/Tier0ChainTests/CMakeLists.txt index 9534e5321481c8a4687dc0e4cf3af120c423c826..2309c3f1bfefb3dfbf5eeba7b1ba8250867663d8 100644 --- a/Tools/Tier0ChainTests/CMakeLists.txt +++ b/Tools/Tier0ChainTests/CMakeLists.txt @@ -4,6 +4,4 @@ atlas_subdir( Tier0ChainTests ) # Install files from the package: -atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} ) -atlas_install_scripts( scripts/tct_*.py scripts/tct_*.sh ) atlas_install_scripts( test/*.sh ) diff --git a/Tools/Tier0ChainTests/README b/Tools/Tier0ChainTests/README deleted file mode 100644 index 1fac4baa20239a5da87949726bdb4d46587462d7..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/README +++ /dev/null @@ -1,179 +0,0 @@ -Version 1 - Max Baak, 20090303 - -Instructions for Tools/Tier0ChainTests package: ------------------------------------------------ - -... are found below. - -test/ directory: ----------------- -Contains the xml files for tct tests defined for specific atlas projects. 
Eg: - -#### -# test/Tier0ChainTests_DefaultConfiguration.xml -# test/Tier0ChainTests_devval_AtlasOffline.xml - -When installing the Tier0ChainTests package, cmt searches for the file with name: -> echo "test/Tier0ChainTests_`echo $AtlasArea | cut -d "/" -f 8,9 | sed 's/\//_/'`.xml" -(best to give this command a try yourself ;-) - -If this file is not found, the fall-back file is: -test/Tier0ChainTests_DefaultConfiguration.xml - - -scripts/ directory: -------------------- -Contains wrapper run scripts called by the tct xml files. -Just calling a script will print a usage line. -Each script has a dryrun option that can be used for testing purposes. Do: -> tct_script -d arg1 arg2 arg3 etc - -When the dryrun option is given, the tct_script wrapper is run, setting up the entire configuration, -but the underlying command is not executed. - -#### -# scripts/tct_getAmiTag.py - -Scripts for getting the latest Recotrf.py configuration from ami. -Usage: -> tct_getAmiTag.py latest ami_recotrf.cmdargs -or eg -> tct_getAmiTag.py f85 myami_recotrf.cmdargs - -the runarguments (eg preinclude, etc.) are then stored in the file: ami_recotrf.cmdargs. -This file can then be used by the script: scripts/tct_recotrf.sh - -#### -# scripts/tct_recotrf.sh - -Script that calls the Recotrf. -Usage: -> tct_recotrf.sh 0 IDCosmic 3 500 /castor/cern.ch/grid/atlas/DAQ/2008/91890 ami_recotrf.cmdargs -where: -- 0: is the job id number -- IDCosmic: is the selected trigger stream -- 500: is the number of event to run over -- /castor/cern.ch/grid/atlas/DAQ/2008/91890: is the castor directory to get collections from. - In this example files are taken from: /castor/cern.ch/grid/atlas/DAQ/2008/91890/physics_IDCosmic - Where IDCosmis is the chosen trigstream. -- ami_recotrf.cmdargs: are the command arguments obtained from running tct_getAmiTag.py. - -#### -# scripts/tct_generate_copyscript.py - -Called by scripts/tct_recotrf.sh. This script generates a dynamic script called: -copyscript.sh -which copies a unique set of input collections from castor to local disk. -These are the input collection for Recotrf. - -#### -# scripts/tct_recotrf_Prod.sh -# scripts/tct_recotrf_Tier0.sh - -At date of writing, these are copies of scripts/tct_recotrf.sh -It is foreseen that AtlasTier0 or AtlasProduction may have different Recotrf.py occasionally. -Use these scripts to account for possible differences. - -#### -# scripts/tct_mergeMonHistograms.sh - -Script for merging monitoring histograms: -Usage: -> tct_mergeMonHistograms.sh myMergedMonitoring.root myMergedMonitoring_IDCosmic_0.root myMergedMonitoring_IDCosmic_1.root etc -where: -- myMergedMonitoring.root: is the output file, and -- myMergedMonitoring_IDCosmic_0.root myMergedMonitoring_IDCosmic_1.root etc - are all input files to be merged. - -#### -# scripts/tct_dqwebdisplay.sh - -Scripts for setting up the dq webdisplay. Usage: -> tct_dqwebdisplay.sh myMergedMonitoring.root Tier0ChainTests.TCTDQWebDisplayCosmics cosmics_run.latest.hcfg cosmics_minutes10.latest.hcfg cosmics_minutes30.latest.hcfg -where -- myMergedMonitoring.root : is the output produced by scripts/tct_mergeMonHistograms.sh -- Tier0ChainTests.TCTDQWebDisplayCosmics: is the dqwebdisplay configuration setup, found in the python/ directory. - Default setup is for local running: Tier0ChainTests.LocalDQWebDisplayCosmics -- cosmics_run.latest.hcfg cosmics_minutes10.latest.hcfg cosmics_minutes30.latest.hcfg: are han configuration files. 
- In case not set, fall-back han files are: - hanconfigfile=/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/Cosmics/cosmics_run.1.80.hcfg - hanconfigfile=/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/Cosmics/cosmics_minutes10.1.12.hcfg - hanconfigfile=/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/Cosmics/cosmics_minutes30.1.9.hcfg - -#### -# scripts/tct_CreateDB.py - -Scripts for setting up a dummy cool db. Used for testing of uploading dq flags. -This script is used inside: scripts/tct_dqwebdisplay.sh - -#### -# scripts/tct_dqutil.py - -Utility script used by: scripts/tct_dqwebdisplay.sh -Specifically, used for interpreting the dqwebdisplay configuration, eg Tier0ChainTests.TCTDQWebDisplayCosmics - -#### -# scripts/tct_fixhanrootnames.py - -Utility script used by: scripts/tct_dqwebdisplay.sh -Specifically, used for renaming produced han root files. - -#### -# scripts/tct_tagupload.sh - -Script for testing of uploading tag files to db. -(In tct, called after AODtoTAG_trf.py command.) -Usage: -tct_tagupload.sh myTag.pool.root - - -#### -# scripts/tct_dqupdatereferences.sh - -Script to create new han config files based on old han config files and new han root files. -Usage: -> tct_dqupdatereferences.sh cosmics_minutes30.latest.hcfg run__minutes30_1_han.root new_cosmics_minutes30.latest.hcfg 91890 -where : -- cosmics_minutes30.latest.hcfg is input han config file -- run__minutes30_1_han.root is the han root file -- new_cosmics_minutes30.latest.hcfg is the new han config file -- 91890 is the runnumber the han root file corresponds with. - -#### -# scripts/tct_changehanreferences.py - -Utility script called by scripts/tct_dqupdatereferences.sh -Does the actual replacing of histogram references and creates the new han config file. - - -#### -#scripts/tct_finishedMail.py - -Send a mail when finished. Usage: -> scripts/tct_finishedMail.py email@cern.ch messagebody - -#### -# scripts/tct_runAll.sh - -Script for runnign the entire tct chain in your shell. -Usage: -> tct_runAll.sh 2 IDCosmic 1 100 /castor/cern.ch/grid/atlas/DAQ/2008/0096544 - -to run two reco jobs, 1 bs collection each and over 100 events, over -IDCosmic collections from the castor directory -/castor/cern.ch/grid/atlas/DAQ/2008/0096544 - - -python/ directory: ------------------- - -#### -# python/CastorFileTool.py - -Utility class used by scripts/tct_generate_copyscript.py - -#### -# python/LocalDQWebDisplayCosmics.py -# python/TCTDQWebDisplayCosmics.py - -Two possible DQ webdisplay configuration settings. Local is the default, used for local running. TCT is run by the rtt. 
diff --git a/Tools/Tier0ChainTests/python/DynamicDQWebDisplayCosmics.py b/Tools/Tier0ChainTests/python/DynamicDQWebDisplayCosmics.py deleted file mode 100644 index 9e52c1a721c7defaff928382b4ae026541550679..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/python/DynamicDQWebDisplayCosmics.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -from DataQualityUtils.DQWebDisplayConfig import DQWebDisplayConfig - -dqconfig = DQWebDisplayConfig() -dqconfig.config = "TCTYesterdaysHAN" -#dqconfig.config = "TCTDQWebDisplayCosmics" -## /afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/Cosmics/ -#dqconfig.hcfg_min30 = "/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/Cosmics/cosmics_minutes30.1.9.hcfg" -#dqconfig.hcfg = "/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/Cosmics/cosmics_run.1.80.hcfg" -#dqconfig.hcfg_min10 = "/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/Cosmics/cosmics_minutes10.1.12.hcfg" -dqconfig.hcfg_min30 = "cosmics_prevminutes30.latest.hcfg" -dqconfig.hcfg = "cosmics_prevrun.latest.hcfg" -dqconfig.hcfg_min10 = "cosmics_prevminutes10.latest.hcfg" -dqconfig.hanResultsDir = "/afs/cern.ch/atlas/offline/external/FullChainTest/tier0/dqm/han_results" -dqconfig.htmlDir = "/afs/cern.ch/atlas/offline/external/FullChainTest/tier0/dqm/wwwtemp" -dqconfig.htmlWeb = "http://atlas-project-fullchaintest.web.cern.ch/atlas-project-FullChainTest/tier0/dqm/wwwtemp" -dqconfig.runlist = "runlist_TCT.xml" -dqconfig.indexFile = "results_TCT.html" -dqconfig.lockFile = "" -dqconfig.dbConnection = "sqlite://;schema=MyCOOL_histo.db;dbname=COMP200" -dqconfig.dqmfOfl = "/GLOBAL/DETSTATUS/DQMFOFL" -dqconfig.dbConnectionHisto = "sqlite://;schema=MyCOOL_histo.db;dbname=COMP200" -dqconfig.dqmfOflHisto = "/GLOBAL/DETSTATUS/DQMFOFLH" -dqconfig.dbTagName = "DetStatusDQMFOFLH-TCT" -dqconfig.doHandi = True - diff --git a/Tools/Tier0ChainTests/python/LocalDQWebDisplayCosmics.py b/Tools/Tier0ChainTests/python/LocalDQWebDisplayCosmics.py deleted file mode 100644 index cce0fcadb7888979349524a002a14bffbfa0754b..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/python/LocalDQWebDisplayCosmics.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -from DataQualityUtils.DQWebDisplayConfig import DQWebDisplayConfig - -dqconfig = DQWebDisplayConfig() -dqconfig.config = "LocalDQWebDisplayCosmics" -## /afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/Cosmics/ -#dqconfig.hcfg_min30 = "/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/Cosmics/cosmics_minutes30.1.9.hcfg" -#dqconfig.hcfg = "/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/Cosmics/cosmics_run.1.80.hcfg" -#dqconfig.hcfg_min10 = "/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/Cosmics/cosmics_minutes10.1.12.hcfg" -dqconfig.hcfg_min30 = "cosmics_minutes30.latest.hcfg" -dqconfig.hcfg = "cosmics_run.latest.hcfg" -dqconfig.hcfg_min10 = "cosmics_minutes10.latest.hcfg" -#dqconfig.hanResultsDir = "/afs/cern.ch/atlas/offline/external/FullChainTest/tier0/dqm/han_results" -#dqconfig.htmlDir = "/afs/cern.ch/atlas/offline/external/FullChainTest/tier0/dqm/wwwtemp" -dqconfig.hanResultsDir = "dqm/han_results" -dqconfig.htmlDir = "dqm/wwwtemp" -dqconfig.htmlWeb = "http://atlas-project-fullchaintest.web.cern.ch/atlas-project-FullChainTest/tier0/dqm/wwwtemp" -dqconfig.runlist = "runlist_TCT.xml" -dqconfig.indexFile = "results_TCT.html" -dqconfig.lockFile = "" -dqconfig.dbConnection = 
"sqlite://;schema=MyCOOL_histo.db;dbname=COMP200" -dqconfig.dqmfOfl = "/GLOBAL/DETSTATUS/DQMFOFL" -#dqconfig.dbConnectionHisto = "sqlite://;schema=MyCOOL_histo.db;dbname=COMP200" -#dqconfig.dqmfOflHisto = "/GLOBAL/DETSTATUS/DQMFOFLH" -dqconfig.dbTagName = "DetStatusDQMFOFLH-TCT" -dqconfig.doHandi = True - diff --git a/Tools/Tier0ChainTests/python/ReleaseDQWebDisplayCosmics.py b/Tools/Tier0ChainTests/python/ReleaseDQWebDisplayCosmics.py deleted file mode 100644 index aff27f9028e4756a3a80a6880366e9d5aea8b3c5..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/python/ReleaseDQWebDisplayCosmics.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration - -from DataQualityConfigurations.data09_900GeV import dqconfig - -dqconfig.config = "TCTReleaseHAN" -dqconfig.hcfg = "/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/tct_reference_run.hcfg" -dqconfig.hcfg_min10 = "/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/tct_reference_minutes10.hcfg" -dqconfig.hcfg_min30 = "/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/tct_reference_minutes30.hcfg" -dqconfig.hanResultsDir = "/afs/cern.ch/atlas/offline/external/FullChainTest/tier0/dqm/han_results" -dqconfig.htmlDir = "/afs/cern.ch/atlas/offline/external/FullChainTest/tier0/dqm/wwwtemp" -dqconfig.htmlWeb = "http://atlas-project-fullchaintest.web.cern.ch/atlas-project-FullChainTest/tier0/dqm/wwwtemp" -dqconfig.runlist = "runlist_TCT.xml" -dqconfig.indexFile = "results_TCT.html" -dqconfig.lockFile = "" -dqconfig.dbConnection = "sqlite://;schema=MyCOOL_histo.db;dbname=COMP200" -dqconfig.dqmfOfl = "/GLOBAL/DETSTATUS/DQMFOFL" -dqconfig.dbConnectionHisto = "sqlite://;schema=MyCOOL_histo.db;dbname=COMP200" -dqconfig.dqmfOflHisto = "/GLOBAL/DETSTATUS/DQMFOFLH" -dqconfig.dbTagName = "DetStatusDQMFOFLH-TCT" -dqconfig.doHandi = True - -dqconfig.auth = "" -dqconfig.server = "" -dqconfig.histogramCache = "" -dqconfig.webHandoffDir = "" diff --git a/Tools/Tier0ChainTests/python/TCTDQWebDisplayCosmics.py b/Tools/Tier0ChainTests/python/TCTDQWebDisplayCosmics.py deleted file mode 100644 index a602c64813f22cb93e4bb9bad4ebc782fbee7f3f..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/python/TCTDQWebDisplayCosmics.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -from DataQualityUtils.DQWebDisplayConfig import DQWebDisplayConfig - -dqconfig = DQWebDisplayConfig() -dqconfig.config = "TCTStaticHAN" -#dqconfig.config = "TCTDQWebDisplayCosmics" -## /afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/Cosmics/ -#dqconfig.hcfg_min30 = "/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/Cosmics/cosmics_minutes30.1.9.hcfg" -#dqconfig.hcfg = "/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/Cosmics/cosmics_run.1.80.hcfg" -#dqconfig.hcfg_min10 = "/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/Cosmics/cosmics_minutes10.1.12.hcfg" -dqconfig.hcfg_min30 = "cosmics_minutes30.latest.hcfg" -dqconfig.hcfg = "cosmics_run.latest.hcfg" -dqconfig.hcfg_min10 = "cosmics_minutes10.latest.hcfg" -dqconfig.hanResultsDir = "/afs/cern.ch/atlas/offline/external/FullChainTest/tier0/dqm/han_results" -dqconfig.htmlDir = "/afs/cern.ch/atlas/offline/external/FullChainTest/tier0/dqm/wwwtemp" -dqconfig.htmlWeb = "http://atlas-project-fullchaintest.web.cern.ch/atlas-project-FullChainTest/tier0/dqm/wwwtemp" -dqconfig.runlist = "runlist_TCT.xml" -dqconfig.indexFile = "results_TCT.html" -dqconfig.lockFile = "" -dqconfig.dbConnection = 
"sqlite://;schema=MyCOOL_histo.db;dbname=COMP200" -dqconfig.dqmfOfl = "/GLOBAL/DETSTATUS/DQMFOFL" -dqconfig.dbConnectionHisto = "sqlite://;schema=MyCOOL_histo.db;dbname=COMP200" -dqconfig.dqmfOflHisto = "/GLOBAL/DETSTATUS/DQMFOFLH" -dqconfig.dbTagName = "DetStatusDQMFOFLH-TCT" -dqconfig.doHandi = True - diff --git a/Tools/Tier0ChainTests/python/__init__.py b/Tools/Tier0ChainTests/python/__init__.py deleted file mode 100644 index 74583d364ec2ca794156596c7254d9b234a940c6..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/python/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - diff --git a/Tools/Tier0ChainTests/python/newalg_bincontentcomp.py b/Tools/Tier0ChainTests/python/newalg_bincontentcomp.py deleted file mode 100644 index da4f46095f6ef8ae4e4345a4b559935ed70e2e1c..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/python/newalg_bincontentcomp.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration - -from DQConfMakerBase.DQElements import DQAlgorithm, DQAlgorithmParameter, DQThreshold - -_alg = DQAlgorithm(id='BinContentComp', libname='libdqm_algorithms.so') -_algpars = [DQAlgorithmParameter(name='NSigma', value=0.001), - DQAlgorithmParameter(name='FixedError', value=0.001)] -_thresholds = [{'green': DQThreshold(name='NBins', value=0), - 'red': DQThreshold(name='NBins', value=0)} - ] - -def alg(obj): - return [_alg, _algpars, _thresholds, None] - -_summaryalg = DQAlgorithm(id='WorstCaseSummary', libname='libdqm_summaries.so') -def summaryalg(obj): - return _summaryalg diff --git a/Tools/Tier0ChainTests/scripts/PathConstants.py b/Tools/Tier0ChainTests/scripts/PathConstants.py deleted file mode 100644 index b6d7763ff496fbc27fa116704e0480e30058bfa1..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/PathConstants.py +++ /dev/null @@ -1,251 +0,0 @@ -"""Provide hardcoded path related strings shared by a number of modules. - -Trivial (value = name) strings: -cmt -share -bin -installArea - -Ex-Variables - names tht could once take more than one value: -releaseType -runType - -names that take one value -cmtConfigFileMacroName - -RTT project renaming anomally: -projectDict - -Require updates -branches list of legal branches - -Dangerous Kludges -projectMap - list of sub projects - use by findJobOptions - FIXME -""" - -import os.path - - -share = 'share' -cmt = 'cmt' - -# historical - still ocurs in summary overview. Alternative was monolithic (?) -releaseType = 'project' - -# historical - still ocurs in summary overview and results paths. -# Alternative was kit. Note there is a PathNameBuilder.runType -# (dev,prod,batch,shell) that is not the same thing as this relic. -runType = 'build' - -installArea = 'InstallArea' - -# actual name of the macro is <package>_TestConfiguration -cmtConfigFileMacroName = '_TestConfiguration' - -distArea = '/afs/cern.ch/atlas/software/builds' - -eosProjSitRTTPath = '/eos/atlas/atlascerngroupdisk/proj-sit/rtt' - -eosBinDir = '/afs/cern.ch/project/eos/installation/atlas/bin' # not needed any more? HAZ -eosLibDir = '/afs/cern.ch/project/eos/installation/atlas/lib64'# not needed any more? HAZ -#eos = os.path.join(eosBinDir, 'eos.select') # alias to the EOS command -eos = 'eos' - - -# The path to the private file containing RTT connection params for AMI. -# This file is sourced before launching the user job. 
-amiConnectionFile = '/afs/cern.ch/user/r/rtt/private/amiConnection.sh' - -rttExternals = '/afs/cern.ch/atlas/project/RTT/Externals' - -# 31/5/2012. DCubeRunner uses dcubeRefBaseDir as a base dir for data files -dcubeRefBaseDir = '/afs/cern.ch/atlas/project/RTT/DCubeReference' - -# 31/5/2012. DCubeRunner uses dcubeCfgBaseDir as a base dir for cfg files -dcubeCfgBaseDir = '/afs/cern.ch/atlas/project/RTT/Results/reference/' - -rttBases = {'prod': '/afs/cern.ch/atlas/project/RTT/prod', - 'dev': '/afs/cern.ch/atlas/project/RTT/dev' - } - -projectDict = {'AtlasHLT': 'hlt', - 'AtlasProduction': 'offline', - 'AtlasPoint1': 'point1', - 'AtlasTier0': 'tier0', - 'AtlasP1HLT': 'p1hlt'} - -invProjectDict = {} -for k, v in projectDict.items(): - invProjectDict[v] = k - -resultsBaseURL = 'https://atlas-rtt.cern.ch/' - -projects_2017 = [ - 'Athena', - 'AthenaP1', - ] - -projects = [ - 'AtlasAnalysis', - 'AtlasLogin', - 'AtlasSetup', - 'AtlasConditions', - 'AtlasOffline', - 'AtlasSimulation', - 'AtlasCore', - 'AtlasProduction', - 'AtlasTrigger', - 'AtlasEvent', - 'AtlasReconstruction', - 'AtlasHLT', - 'AtlasPoint1', - 'AtlasTier0', - 'AtlasP1HLT', - 'AtlasCAFHLT', - 'TopPhys', - 'TAGProd', - 'WZBenchmarks', - 'TrigMC', - 'AtlasDerivation', - 'AtlasProd1', - 'AtlasProd2', - 'AtlasP1MON', - 'AthAnalysisBase', - 'AtlasTestHLT', - 'AtlasPhysics', - 'Athena'] - -branches_2017 = ['master', - '21.0', - '21.0.X',] - - -branches = [ - 'cmake', - 'dev', - 'devval', - '19.0.X', - '19.0.X.Y', - '19.0.X-VAL', - '19.0.X.Y-VAL', - '19.0.X.Y-VAL2', - '19.1.X', - '19.1.X.Y', - '19.1.X.Y-VAL', - '19.2.X-VAL', - '19.2.X.Y', - '19.2.X.Y-VAL', - '19.1.X-VAL', - '19.1.0.Y-VAL', - '19.1.1.Y-VAL', - '19.1.3.Y-VAL', - '19.3.X.Y', - '19.3.X.Y-VAL', - '20.X.0', - '20.X.0-VAL', - '20.0.X.Y', - '20.0.X.Y-VAL', - '20.1.0.Y', - '20.1.0.Y-VAL', - '20.1.0.Y-VAL2', - '20.1.1.Y', - '20.1.1.Y-VAL', - '20.1.1.Y-VAL2', - '20.1.2.Y-VAL', - '20.1.2.Y-VAL2', - '20.1.3.Y-VAL', - '20.1.3.Y-VAL2', - '20.1.4.Y', - '20.1.4.Y-VAL', - '20.1.5.Y', - '20.1.5.Y-VAL', - '20.1.5.Y.Z-VAL', - '20.1.X', - '20.1.X-VAL', - '20.1.X.Y', - '20.1.X.Y-VAL', - '20.1.X.Y.Z', - '20.1.X.Y.Z-VAL', - '20.2.X', - '20.2.X-VAL', - '20.2.X.Y', - '20.2.X.Y-VAL', - '20.2.X.Y.Z-VAL', - '20.2.X.Y.Z-VAL2', - '20.3.X', - '20.3.X-VAL', - '20.3.X.Y', - '20.3.X.Y-VAL', - '20.3.3.Y-VAL', - '20.3.4.Y-VAL', - '20.3.0.Y-VAL', - '20.6.X', - '20.6.X-VAL', - '20.6.X.Y', - '20.6.X.Y-VAL', - '20.7.X', - '20.7.X-VAL', - '20.7.X.Y', - '20.7.X.Y-VAL', - '20.7.X.Y.Z-VAL', - '20.7.1.Y-VAL', - '20.7.2.Y-VAL', - '20.7.3.Y', - '20.7.3.Y-VAL', - '20.7.3.Y.Z-VAL', - '20.7.4.Y', - '20.7.4.Y-VAL', - '20.7.5.Y-VAL', - '20.7.5.Y.Z-VAL', - '20.7.6.Y-VAL', - '20.7.8.Y-VAL', - '20.7.9.Y-VAL', - '20.99.Y-VAL', - '20.11.X.Y-VAL', - '20.11.X.Y.Z-VAL', - '20.20.X', - '20.20.X-VAL', - '20.20.X.Y-VAL', - '21.0.X', - '21.0.X-VAL', - '21.0.X.Y-VAL', - '21.9.X-VAL', - '22.0.X-master', - '2.0.X', - '2.1.X', - '2.3.X', - '2.4.X', - 'EXP', - 'mig0', - 'mig1', - 'mig2', - 'mig3', - 'mig4', - 'mig5', - 'mig6', - 'mig7', - 'mig8', - 'mig9', - 'mig10', - 'mig11', - 'mig12', - 'mig13', - 'mig14', - 'mig15', - 'devmig1', - 'devmig2', - 'devmig3', - 'devmig4', - 'devmig5', - 'devmig6', - 'lcg4'] - - -_dbconnectionDir = '/afs/cern.ch/atlas/project/RTT/dbconnection/' -cooldozer = '/afs/cern.ch/atlas/project/RTT/prod/Results/dozer/api/latest' -db_location = { - ('rtt', 'prod'): os.path.join(_dbconnectionDir, 'oracle.prod'), - ('rttdev', 'dev'): os.path.join(_dbconnectionDir, 'oracle.dev') -} - -branchdir_2017 = 
'/cvmfs/atlas-nightlies.cern.ch/repo/sw' diff --git a/Tools/Tier0ChainTests/scripts/get_release_2017.py b/Tools/Tier0ChainTests/scripts/get_release_2017.py deleted file mode 100644 index 57a4743fb0a8ef33d5539f1248b31fbd0dd8568e..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/get_release_2017.py +++ /dev/null @@ -1,195 +0,0 @@ -"""get_release: returns the path to a build provided that such a build -exists, and that it is recent.""" - -import os -import re -from time import sleep -from datetime import (datetime, - time, - timedelta) -import PathConstants - -fn_re = re.compile(r'^(?P<year>20\d\d)-(?P<month>\d\d)-(?P<day>\d\d)T\d\d\d\d$') -build_day_start = 20 - -def fn2dt(fn): return datetime.strptime(fn, "%Y-%m-%dT%H%M") - -def fn2asetup_date(fn): - m = fn_re.match(fn) - return 'r%s-%s-%s' % (m.group('year'), m.group('month'), m.group('day')) - -def _get_build_in_period(branch, period_start, period_end): - """ - period_end - end of build persiod under conssideration: - either build_day_start for today or for tomorrow - depending on the start time of the RTT. - - full dir eg - /cvmfs/atlas-nightlies.cern.ch/repo/sw/master/2017-03-09T2245 - """ - - # find if any branchs are recent - basedir = PathConstants.branchdir_2017 - found = [fn for fn in os.listdir(os.path.join(basedir, branch)) - if fn_re.match(fn)] - - def is_recent(fn): - fn_dt = fn2dt(fn) - return fn_dt >= period_start and fn_dt < period_end - - - # print found - recent = [f for f in found if is_recent(f)] - if not recent: - print 'no file available for period %s - %s in %s' % ( - str(period_start), - str(period_end), - os.path.join(PathConstants.branchdir_2017, branch) - ) - return '' - # examine build for matching cmt config paramters - # start search form the most recent, to back in time. - - print 'recent', recent - recent = sorted( - recent, - cmp=lambda x,y: cmp(fn2dt(x), fn2dt(y)), - reverse=True) - return recent[0] - - -def _get_release(release_datetime): - """return the release (rel_X) for a datetime within the current - build persiod. The current build period starts at the build_start_time - of the same day if the rtt starts up at a time after the build_start_time, - otherwise the build_start_time of the previous day""" - - - # python calls Sunday 6, Monday 0, ATLAS: Sunday 0, Monday 1 - rel = release_datetime.weekday() + 1 - if rel == 7: rel = 0 - - # but if the build starts after build_day_start, consider - # it to be the next day - - # so, a build starting after build_day_start on sunday - # will be assigned 6 by python, converted to 0 above. - # further it will be reassigned to 1 as it starts after - # build_day_start - if release_datetime.hour >= build_day_start: rel += 1 - if rel == 7: rel = 0 - - return 'rel_%d' % rel - -def get_period_start(dt): - - result = dt.replace( - hour=build_day_start, - minute=0, - second=0, - microsecond=0) - - if dt.hour < build_day_start: - result = result - timedelta(days=1) - - return result - - -def get_release(branch, cmtconfig, check_datetime=None): - """Return a release (string matching rel_[0,6]) if a build occurs - during a period of interest, '' otherwise. - If the RTT check time is after build_day_start, the period of - interest starts at the same day at build_day_start, otherwise - at build_day_start of the previous day. It terminates 24 hours later. - - If no such build exists, the function waits for it to arrive, or - the current time exits the persiod of interest. 
- """ - if check_datetime is None: check_datetime = datetime.now() - - period_start = get_period_start(check_datetime) - print period_start - period_end = period_start + timedelta(days=1) - - while True: - fn = _get_build_in_period(branch, - period_start, - period_end) - if not fn: - print 'get_release_2017 no file available, sleeping' - sleep(60*5) # no file available, sleep 5 mins - else: - return (fn, _get_release(fn2dt(fn)), fn2asetup_date(fn)) - -if __name__ == '__main__': - import sys - branch = sys.argv[1] - cmtconfig = sys.argv[2] - - # eg python get_release_2017.py master x86_64-slc6-gcc49-opt - - - - print 'calling _get_build_in_period', branch, cmtconfig - period_start = get_period_start(datetime.now()) - period_end = period_start + timedelta(days=1) - - fn = _get_build_in_period(branch, period_start, period_end) - print 'release file: -->%s<--' % fn - - # the call to get release waits for a suitable build to appear, - # so may hang. - # fn, rel = get_release(datetime.now(), branch, cmtconfig) - # print 'ran get_release', branch, cmtconfig - # print fn, rel - - print 'calling _get_release' - - early = build_day_start - 1 - late = build_day_start + 1 - - assert early in range(24) - assert late in range(24) - - sunday_early = datetime(year=2017, - month=3, - day=12, - hour=early) - - sunday_late = sunday_early.replace(hour=late) - - assert _get_release(sunday_early) == 'rel_0' - assert _get_release(sunday_late) == 'rel_1' - - - print '\ntemp debug\n' - now = datetime.now() - now_1 = now - timedelta(days=1) - now_2 = now - timedelta(days=2) - now_3 = now - timedelta(days=3) - - conf = 'x86_64-slc6-gcc62-opt' - branch = 'master' - - print branch, conf - print get_release(branch, conf) - print get_release(branch, conf, now) - print get_release(branch, conf, now_1) - print get_release(branch, conf, now_2) - print get_release(branch, conf, now_3) - - - branch = '21.0' - print - print branch, conf - print - print get_release(branch, conf) - print get_release(branch, conf, now) - print get_release(branch, conf, now_1) - print get_release(branch, conf, now_2) - print get_release(branch, conf, now_3) - - print 'ok' - - - diff --git a/Tools/Tier0ChainTests/scripts/tct_CreateDB.py b/Tools/Tier0ChainTests/scripts/tct_CreateDB.py deleted file mode 100755 index c4005a3bda22c679c8bbcf469fc282306d072cb7..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_CreateDB.py +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/env python - -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -import sys -import os, os.path - -createdbhisto = 'CreateDB_Histo.py' -patharr = os.environ["PATH"].split(":") - -fileFound = False -for path in patharr: - file = os.path.join( path, createdbhisto ) - if os.path.exists( file ): - createdbhisto = file - fileFound = True - break - -if not fileFound: - print createdbhisto + " not found. Exit." 
- sys.exit(1) - -if len(sys.argv)>=2: - sys.argv[1] = "createdb" -else: sys.argv.append("createdb") - -execfile(createdbhisto) - diff --git a/Tools/Tier0ChainTests/scripts/tct_buildPickleFile.py b/Tools/Tier0ChainTests/scripts/tct_buildPickleFile.py deleted file mode 100755 index c3eb216ae1eb1ffa9fbb030eb34e0523d6661470..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_buildPickleFile.py +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env python - -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration -# -# usage: tct_buildPickleFile.py pickleFileName inputFileList outputFileName - -import sys,pickle - -if len(sys.argv) < 3: - print "usage: tct_buildPickleFile.py pickleFileName inputFileList outputFileName" - sys.exit() - -data = {'inputROOTFiles':[],\ - 'outputROOTFile':sys.argv[3]+"#"+sys.argv[3]} - -inputFiles = open(sys.argv[2],'r') -lines = inputFiles.readlines() -[ data['inputROOTFiles'].append(line.strip('\n')+"#"+line.strip('\n')) for line in lines ] - -output = open(sys.argv[1],'w') -pickle.dump(data,output) -output.close() diff --git a/Tools/Tier0ChainTests/scripts/tct_changehanalgorithms.py b/Tools/Tier0ChainTests/scripts/tct_changehanalgorithms.py deleted file mode 100755 index c56c371e338182edf722ab894bd31c2fb91832f8..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_changehanalgorithms.py +++ /dev/null @@ -1,160 +0,0 @@ -#!/usr/bin/env python - -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -""" -Replace all algorithms in a han configuration file by a new one, then output -new binary configuration file. -""" -#@author: ponyisi@hep.uchicago.edu -#@date: 13 Nov 2008 - -from DQConfMakerBase.DQElements import * -from DQConfMakerBase.Helpers import IDHelper, make_thresholds -from DQHanConfMaker.hanwriter import writeHanConfiguration -import ROOT - -def makeDQObject(obj, newalg, opts): - name = obj.GetName() - #algname = obj.GetAlgName() - #alglib = obj.GetAlgLibName() - #algref = obj.GetAlgRefName() - annotations = obj.GetAllAnnotations() - #print algref - if opts.keepweights: - weight = obj.GetWeight() - else: - weight = 1 - if isinstance(obj, ROOT.dqi.HanConfigGroup): - if opts.keepsummaries: - alg = getAlgorithm(obj) - else: - alg = newalg.summaryalg(obj) - dqreg = DQRegion(id=name, algorithm=alg, weight=weight) - return dqreg - else: - newalgparm = newalg.alg(obj) - alg = newalgparm[0] - algpars = newalgparm[1] - thresholds = newalgparm[2] - algref = newalgparm[3] - if algref == None: - if obj.GetAlgRefName() != '': - algref = [DQReference(reference=options.fname + ':'+obj.GetAlgRefName())] - else: - algref = None - dqpar = DQParameter(id=name.rpartition('/')[2], algorithm=alg, - algorithmparameters=algpars, - weight=weight, - thresholds=thresholds, references=algref, - inputdatasource=name) - for annotation in annotations: - if annotation.GetName() == 'inputname': - continue - dqpar.addAnnotation(annotation.GetName(), annotation.GetValue()) - if obj.GetIsRegex(): - dqpar.addAnnotation('regex', 1) - return dqpar - - -def recurse(newalg, obj, dqobj, opts): - #print obj.GetName() - if isinstance(obj, ROOT.dqi.HanConfigGroup): - for x in obj.GetAllAssessors(): - newdqobj = makeDQObject(x, newalg, opts) - dqobj.addDQParameter(newdqobj) - recurse(newalg, x, newdqobj, opts) - for x in obj.GetAllGroups(): - newdqobj = makeDQObject(x, newalg, opts) - dqobj.addDQRegion(newdqobj) - recurse(newalg, x, newdqobj, opts) - -def getAlgorithm(obj): - alg = 
DQAlgorithm(id=obj.GetAlgName(), libname=obj.GetAlgLibName()) - return alg - -def do_composites(f): - pass -## rv = [] -## for k in f.GetListOfKeys(): -## cl = ROOT.TClass.GetClass(k.GetClassName()) -## if cl.InheritsFrom('dqi::HanConfigCompAlg'): -## obj = k.ReadObj() -## print obj.GetName() -## nobj = CompositeAlgorithm(id=k.GetName(), -## subalgorithms=[DQAlgorithm(id=x) for x in 'GatherData']) - -def copy_compalgs(inf, out): - fin = ROOT.TFile(inf, 'READ') - fout = ROOT.TFile(out, 'UPDATE') - fout.cd() - for k in fin.GetListOfKeys(): - cl = ROOT.TClass.GetClass(k.GetClassName()) - if cl.InheritsFrom('dqi::HanConfigCompAlg'): - obj = k.ReadObj() - #fout.cd() - obj.Write() - fin.Close() - fout.Close() - - -if __name__ == '__main__': - import sys, tempfile, optparse - parser = optparse.OptionParser() - parser.add_option('--keepweights', action='store_true', default=False, - help='Keep original check weights from file; default resets them to 1') - parser.add_option('--keepsummaries', action='store_true', default=False, - help='Keep original summary algorithms; default changes them using summaryalg()') - (options, args) = parser.parse_args() - options.fname = args[0] - - if len(args) != 3: - print 'This script replaces all the reference histograms by those of the same' - print 'name in a new file.' - print - print 'usage: %s <inputhcfg> <newalgpy> <outputhcfg> [--keepweights] [--keepsummaries]' % sys.argv[0] - print ' --> inputhcfg and outputhcfg files are binary configurations' - print ' --> newalgpy is a python file with an alg() function that' - print ' returns a configured algorithm in the DQConfMakerSense' - print ' --> use --keepweights to keep the weights in the original configuration,' - print ' otherwise they will be reset to 1' - print ' --> use --keepsummaries to keep the original summary algorithms,' - print ' otherwise a summaryalg() function is expected in newalgpy' - sys.exit(3) - - if ROOT.gSystem.Load('libDataQualityInterfaces') != 0: - print 'Cannot load libDataQualityInterfaces.so. Perhaps you need to do some CMT magic.' 
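# Hedged aside, not in the deleted script: TSystem::Load returns 0 on
# success, 1 if the library was already loaded, and negative values on
# failure, so the `!= 0` test above also aborts when the library happens to
# be loaded already. A sketch of the more forgiving guard:
import sys
import ROOT

if ROOT.gSystem.Load('libDataQualityInterfaces') < 0:  # < 0: load really failed
    print('Cannot load libDataQualityInterfaces.so.')
    sys.exit(4)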
- sys.exit(4) - - infile = ROOT.TFile.Open(args[0], 'READ') - if infile == None: - print 'cannot find', args[0] - sys.exit(1) - topgroup = infile.Get('top_level') - if topgroup == None: - print 'cannot find top_level configuration in config file' - sys.exit(2) - topalg = getAlgorithm(topgroup) - topreg = DQRegion(id='topRegion', algorithm=topalg) - newalgpy = args[1] - - if newalgpy[-3:] == '.py': - newalgpy = newalgpy[:-3] - - newalg = __import__(newalgpy, fromlist=['alg']) - - recurse(newalg, infile.Get('top_level'), topreg, options) - #do_composites(infile) - - sublevel = topreg.getSubRegions()[:] - for x in sublevel: - topreg.delRelation('DQRegions', x) - - tmp = tempfile.NamedTemporaryFile() - writeHanConfiguration(filename=tmp.name, roots=sublevel) - #writeHanConfiguration(filename='output.config', roots=sublevel) - #import shutil - #shutil.copy(tmp.name, 'newc_' + args[2].replace('hcfg', 'config')) - ROOT.dqi.HanConfig().AssembleAndSave( tmp.name, args[2] ) - tmp.close() - #copy_compalgs(args[0], args[2]) diff --git a/Tools/Tier0ChainTests/scripts/tct_changehanreferences.py b/Tools/Tier0ChainTests/scripts/tct_changehanreferences.py deleted file mode 100755 index 578b5f3ed1ea96e30402bbc5279ff64bd17f8c27..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_changehanreferences.py +++ /dev/null @@ -1,221 +0,0 @@ -#!/usr/bin/env python - -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -""" -Replace all references in a han configuration file by a new file, then output -new configuration. -""" -#@author: ponyisi@hep.uchicago.edu -#@date: 13 Nov 2008 - -from DQConfMakerBase.DQElements import * -from DQConfMakerBase.Helpers import IDHelper, make_thresholds -from DQHanConfMaker.hanwriter import writeHanConfiguration -import ROOT - -fcache = {} - -def test_obj_existence(newref, name): - prune, dum, tail = newref.rpartition(':') - if prune in fcache: - f = fcache[prune] - else: - print 'Opening', prune - f = ROOT.TFile.Open(prune) - fcache[prune] = f - import os - o = f.Get(os.path.join(tail, name)) - if o == None: - #print name, 'FAIL', f, os.path.join(tail, name) - #f.Close() - return False - else: - #print name, 'PASS', f, os.path.join(tail, name) - #f.Close() - return True - -def makeDQObject(obj, newref, prefix, opts): - name = obj.GetName() - if opts.hanformat: - if prefix == '': - newrefname = name.rpartition('/')[2] - else: - newrefname = prefix + '/' + name.rpartition('/')[2] - #print newrefname - else: - if not isinstance(obj, ROOT.dqi.HanConfigGroup): - if obj.GetAnnotation('inputname'): - newrefname = obj.GetAnnotation('inputname').GetValue() - else: - newrefname = name - else: - newrefname = name - algname = obj.GetAlgName() - alglib = obj.GetAlgLibName() - algref = obj.GetAlgRefName() - if algref != '' or (opts.forceref and not isinstance(obj, ROOT.dqi.HanConfigGroup)): - if test_obj_existence(newref, newrefname): - algref = [DQReference(reference=newref+newrefname)] - elif algref != '' and opts.fallback: - algref = [DQReference(reference=sys.argv[1]+':'+algref)] - else: - algref = None - print 'Unable to find a good reference for', name - else: - algref = None - algpars = [DQAlgorithmParameter(name=x.GetName(), value=x.GetValue()) - for x in obj.GetAllAlgPars()] - thresholds = [{'green': DQThreshold(name=x.GetName(), value=x.GetGreen()), - 'red': DQThreshold(name=x.GetName(), value=x.GetRed())} - for x in obj.GetAllAlgLimits()] - #alglimitsitr = obj.GetAllAlgLimits() - annotations = obj.GetAllAnnotations() - alg = 
DQAlgorithm(id=algname, libname=alglib) - #print algref - if isinstance(obj, ROOT.dqi.HanConfigGroup): - dqreg = DQRegion(id=name, algorithm=alg, weight=obj.GetWeight()) - return dqreg - else: - dqpar = DQParameter(id=name.rpartition('/')[2], algorithm=alg, - algorithmparameters=algpars, - weight=obj.GetWeight(), - thresholds=thresholds, references=algref, - inputdatasource=name) - anndict = {} - for annotation in annotations: - if annotation.GetName() == 'inputname': - continue - anndict[annotation.GetName()] = annotation.GetValue() - if options.nonorm: - if 'display' not in anndict: - anndict['display'] = 'NoNorm' - else: - if 'NoNorm' not in anndict['display']: - anndict['display'] += ',NoNorm' - for annkey, annval in anndict.items(): - dqpar.addAnnotation(annkey, annval) - if obj.GetIsRegex(): - dqpar.addAnnotation('regex', 1) - return dqpar - - -def recurse(fname, obj, dqobj, opts, prefix = ''): - #print obj.GetName() - if isinstance(obj, ROOT.dqi.HanConfigGroup): - if prefix == '/top_level': - newprefix = obj.GetName() - else: - newprefix = prefix + '/' + obj.GetName() - for x in obj.GetAllAssessors(): - newdqobj = makeDQObject(x, fname, newprefix, - opts) - dqobj.addDQParameter(newdqobj) - recurse(fname, x, newdqobj, opts, newprefix) - for x in obj.GetAllGroups(): - newdqobj = makeDQObject(x, fname, newprefix, - opts) - dqobj.addDQRegion(newdqobj) - recurse(fname, x, newdqobj, opts, newprefix) - -def getAlgorithm(obj): - alg = DQAlgorithm(id=obj.GetAlgName(), libname=obj.GetAlgLibName()) - return alg - -def do_composites(f): - pass -## rv = [] -## for k in f.GetListOfKeys(): -## cl = ROOT.TClass.GetClass(k.GetClassName()) -## if cl.InheritsFrom('dqi::HanConfigCompAlg'): -## obj = k.ReadObj() -## print obj.GetName() -## nobj = CompositeAlgorithm(id=k.GetName(), -## subalgorithms=[DQAlgorithm(id=x) for x in 'GatherData']) - -def copy_compalgs(inf, out): - fin = ROOT.TFile(inf, 'READ') - fout = ROOT.TFile(out, 'UPDATE') - fout.cd() - for k in fin.GetListOfKeys(): - cl = ROOT.TClass.GetClass(k.GetClassName()) - if cl.InheritsFrom('dqi::HanConfigCompAlg'): - obj = k.ReadObj() - #fout.cd() - obj.Write() - fin.Close() - fout.Close() - - -if __name__ == '__main__': - import sys, tempfile, optparse, urllib, os - parser = optparse.OptionParser() - parser.add_option('--hanformat', action='store_true', default=False, - help='Reference file is DQMF output') - parser.add_option('--forceref', action='store_true', default=False, - help="Add reference to all checks, even if original didn't have one") - parser.add_option('--fallback', action='store_true', default=False, - help='Fall back to original reference if new reference is not available') - parser.add_option('--nonorm', action='store_true', default=False, - help='Do not normalize references') - (options, args) = parser.parse_args() - - if len(args) != 3: - print 'This script replaces all the reference histograms by those of the same' - print 'name in a new file.' 
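# Editorial sketch, not in the deleted script: optparse has been deprecated
# since Python 2.7; the same four flags and three positional arguments in
# argparse, which also generates the usage text printed below:
import argparse

parser = argparse.ArgumentParser(
    description='Replace all references in a han configuration file.')
parser.add_argument('inputhcfg', help='input binary configuration')
parser.add_argument('newrefpath', help='new reference, as rootfile.root:top_path')
parser.add_argument('outputhcfg', help='output binary configuration')
parser.add_argument('--hanformat', action='store_true',
                    help='Reference file is DQMF output')
parser.add_argument('--forceref', action='store_true',
                    help="Add reference to all checks, even if original didn't have one")
parser.add_argument('--fallback', action='store_true',
                    help='Fall back to original reference if new reference is not available')
parser.add_argument('--nonorm', action='store_true',
                    help='Do not normalize references')
options = parser.parse_args()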
- print - print 'usage: %s <inputhcfg> <newrefpath> <outputhcfg> [--hanformat] [--forceref] [--fallback] [--nonorm]' % sys.argv[0] - print ' --> inputhcfg and outputhcfg files are binary configurations' - print ' --> newrefpath is the new reference file you want to use in' - print ' the format rootfile.root:top_path' - print ' for example newref.root:run_92226' - print ' --> use --hanformat if the new reference file was produced by DQMF' - print ' --> use --forceref if you want to add references to all histograms, ' - print ' not just the ones that already have references' - print ' --> use --fallback if you want to use the existing reference if no new' - print ' reference is available' - print " --> use --nonorm if you don't want to normalize the references to data" - print ' histogram area' - sys.exit(3) - - if ROOT.gSystem.Load('libDataQualityInterfaces') != 0: - print 'Cannot load libDataQualityInterfaces.so. Perhaps you need to do some CMT magic.' - sys.exit(4) - - infile = ROOT.TFile.Open(args[0], 'READ') - if infile == None: - print 'cannot find', args[0] - sys.exit(1) - topgroup = infile.Get('top_level') - if topgroup == None: - print 'cannot find top_level configuration in config file' - sys.exit(2) - topalg = getAlgorithm(topgroup) - topreg = DQRegion(id='topRegion', algorithm=topalg) - newref = args[1] - urlname = newref.rsplit(':',1)[0] - localnewreffile, dum = urllib.urlretrieve(newref.rsplit(':',1)[0]) - localnewref = newref.replace(urlname, localnewreffile) - patchref = localnewref - if ':' not in patchref: - patchref += ':' - if not options.hanformat and patchref[-1] != '/': - patchref += '/' - - recurse(patchref, infile.Get('top_level'), topreg, options) - do_composites(infile) - - sublevel = topreg.getSubRegions()[:] - for x in sublevel: - topreg.delRelation('DQRegions', x) - - tmp = tempfile.NamedTemporaryFile() - writeHanConfiguration(filename=tmp.name, roots=sublevel) - #writeHanConfiguration(filename='output.config', roots=sublevel) - import shutil - #shutil.copy(tmp.name, 'newc_' + args[2].replace('hcfg', 'config')) - ROOT.dqi.HanConfig().AssembleAndSave( tmp.name, args[2] ) - tmp.close() - copy_compalgs(args[0], args[2]) - if localnewref != newref: - os.unlink(localnewreffile) diff --git a/Tools/Tier0ChainTests/scripts/tct_dqflagscheck.sh b/Tools/Tier0ChainTests/scripts/tct_dqflagscheck.sh deleted file mode 100755 index ac458ff7b1000fe21cb7ba4a2a09ce8804bbee3c..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_dqflagscheck.sh +++ /dev/null @@ -1,62 +0,0 @@ -#!/bin/sh - -## Check arguments -if [ $# -lt 1 ] ; then - echo "Usage: tct_dqflagscheck.sh <hanresultsfile> [<dqflagslist>] [<comparisonlist>]" - exit 1 -fi - -## set configuration parameters -hanresultsfile=$1 -if [ ! -f $hanresultsfile ]; then - echo "ERROR: Input han file <$hanresultsfile> not found. Exit." - exit 1 -fi - -dqflagslist="dqflags.list" -if [ $# -gt 1 ] ; then - dqflagslist=$2 -fi - -comparisonlist="None" -if [ $# -gt 2 ] ; then - comparisonlist=$3 -fi - -## run command -RUNCMD="han-results-print.exe ${hanresultsfile}" -#han-results-print.exe run__han.root 2>&1 | tee dqflags.list; echo "ErrorCode=0 (OK) -$RUNCMD 2>&1 | tee $dqflagslist - -## check overall status -status="Red" -if [ ! 
-f $comparisonlist ]; then - status=`grep top_level ${dqflagslist} | gawk '{print $2}'` - cp -f ${dqflagslist} summary.txt -else - dqflagsdiff=`diff ${dqflagslist} ${comparisonlist}` - echo ${dqflagsdiff} > summary.txt - if [ "$dqflagsdiff" = "" ]; then - status="Green" - else - echo - echo "ERROR: Difference in dq flag lists between today and yesterday." - echo ${dqflagsdiff} - echo - fi -fi - -echo -echo "Input han root file : ${hanresultsfile}" -echo "Output Data Quality flags list : ${dqflagslist}" -echo "Comparison list : ${comparisonlist}" -echo "Overall Data Quality status : ${status}" -echo - -if [ "$status" = "Green" ] ; then - echo "TestCode=SUCCESS" -else - echo "TestCode=FAILED" -fi -echo "ErrorCode=0 (OK)" - diff --git a/Tools/Tier0ChainTests/scripts/tct_dqupdatereferences.sh b/Tools/Tier0ChainTests/scripts/tct_dqupdatereferences.sh deleted file mode 100755 index aa941de7675a278c6c1e42bf498246a3a69b518c..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_dqupdatereferences.sh +++ /dev/null @@ -1,81 +0,0 @@ -#!/bin/sh - -## Check arguments -if [ $# -lt 4 ] ; then - echo "Usage: tct_dqupdatereferences.sh <inputhcfg> <newreffile> <outputhcfg> <runnumber>" - exit 1 -fi - -## Intercept dryrun option if given -DRYRUN=0 -if [ "$1" = "-d" ] ; then - DRYRUN=1 - shift -fi - -## set configuration parameters -inputhcfg=$1 -newrefpath=$2 -outputhcfg=$3 -runnumber=$(echo $4 | bc) -if [ ! -f $inputhcfg ]; then - echo "ERROR: input han confuguration file <$inputhcfg> not found. Exit." - exit 1 -fi -newreffile=`echo $newrefpath | cut -d ":" -f 1` -if [ ! -f $newreffile ]; then - echo "ERROR: input han results file <$newreffile> not found. Exit." - exit 1 -fi -toppath=`echo $newrefpath | cut -d ":" -f 2` -if [ ! $toppath = $newreffile ]; then - if [ ! $toppath = run_${runnumber} ]; then - echo "ERROR: toppath <$toppath> and given runnumber <$runnumber> do not match. Exit." 
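# Hedged aside, not part of the deleted script: the `echo $4 | bc` above is
# only there to strip leading zeros (00184169 -> 184169). In bash/ksh the
# same effect comes from base-10 forced arithmetic, with no bc dependency:
#
#   runnumber=$((10#$4))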
- exit 1 - fi -fi - - -## first, update han references -RUNCMDA="tct_changehanreferences.py ${inputhcfg} ${newreffile}:run_${runnumber} intermediate.hcfg --forceref" -echo -echo ">> Now running command:" -echo ">> ====================" -echo "$RUNCMDA" -echo -if [ $DRYRUN -ne 1 ]; then - #source /afs/cern.ch/atlas/project/tdaq/cmt/bin/cmtsetup.sh dqm-common-00-03-02 - #cvs co -P -r DQConfMaker-01-00-01 common/DQConfMaker - #export PYTHONPATH=$PYTHONPATH:$PWD/common/DQConfMaker/python - ## fix this - put DQConfMaker in release - export PYTHONPATH=$PYTHONPATH:/afs/cern.ch/atlas/offline/external/FullChainTest/tier0-vol3/rtt/fallbackfiles/DQConfMaker/python - $RUNCMDA - #rm -Rf common -fi -echo -echo ">> Finished command:" -echo ">> =================" -echo "$RUNCMDA" -echo - - -## second, update the han algorithms -RUNCMDB="tct_changehanalgorithms.py intermediate.hcfg Tier0ChainTests.newalg_bincontentcomp ${outputhcfg}" -echo -echo ">> Now running command:" -echo ">> ====================" -echo "$RUNCMDB" -echo -if [ $DRYRUN -ne 1 ]; then - $RUNCMDB -fi -echo -echo ">> Finished command:" -echo ">> =================" -echo "$RUNCMDB" -echo - - -## this should only be reached if all successful ;-) -echo "ErrorCode=0 (OK)" - diff --git a/Tools/Tier0ChainTests/scripts/tct_dqutil.py b/Tools/Tier0ChainTests/scripts/tct_dqutil.py deleted file mode 100755 index c468d89483ce0940f369ebc37ddaa5342e57a234..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_dqutil.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env python - -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -import sys -import os, os.path - -def importConfiguration(modname): - mod = __import__(modname) - components = modname.split('.') - for comp in components[1:]: - mod = getattr(mod,comp) - return mod - -if __name__ == "__main__": - - if len(sys.argv) < 3: - print "usage: ", sys.argv[0], "<configModule> <property>" - sys.exit(1) - - configModule = sys.argv[1] - property = sys.argv[2] - - try: - cmod = importConfiguration(configModule) - except Exception, e: - print "Could not import configuration module \'" + configModule + "\'" - sys.exit(1) - - config = "" - pycmd = "config = cmod.%s" % property - - try: - exec pycmd - except Exception, e: - print "Configuration object '%s' not defined in module '%s'. Exit" % (property,configModule) - sys.exit(1) - - print config - diff --git a/Tools/Tier0ChainTests/scripts/tct_dqwebdisplay.py b/Tools/Tier0ChainTests/scripts/tct_dqwebdisplay.py deleted file mode 100755 index 11455a24f288195dbe89a5bb87c78d9fa1d2a0f2..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_dqwebdisplay.py +++ /dev/null @@ -1,174 +0,0 @@ -#!/usr/bin/env python - -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -import os,sys,datetime,time,shutil - -__author__ = "Renaud Bruneliere <Renaud.Bruneliere@cern.ch>" -__doc__ = """Python script used by tct to run sequentially -the data quality web displaying jobs""" - -def parseCmdLine(args): - """ Parse input command line to optdict. 
- To get the whole list of options type : tct_dqwebdisplay.py -h""" - from optparse import OptionParser - parser = OptionParser() - # The following options are specific to tct_recotrf.py command - parser.add_option("-d","--dry-run", dest="dryrun", help="Dry running", - action='store_true', default=False) - parser.add_option("-m","--monitorfile", dest="monitorfile", - help="Input monitoring root file", - default="myMergedMonitoring_IDCosmic.root") - parser.add_option("--dqconfig", dest="dqconfig", - help="DQ configuration", - default='Tier0ChainTests.LocalDQWebDisplayCosmics') - parser.add_option("-c","--hanconfig", dest="hanconfig", - help="Handy run configuration", - default='cosmics_prevrun.latest.hcfg') - parser.add_option("--handir", dest="handir", help="Output han directory", - default='') - parser.add_option("--webdir", dest="webdir", help="Output web directory", - default='') - (config, args) = parser.parse_args(args) - return config - -def rundqdisplayjob(dqname,config,hanshortlist): - print "\ntct_dqwebdisplay.py: start running on",dqname,"...",datetime.datetime.now() - if dqname == 'Tier0ChainTests.DynamicDQWebDisplayCosmics' and config.hanconfig == '': - print "tct_dqwebdisplay.py: WARNING: cannot run",dqname,'because hanconfig is empty' - return - isstaticcomp = (dqname == 'Tier0ChainTests.TCTDQWebDisplayCosmics') - # check and build if missing output web directories - dqimport = __import__(dqname) - dqwebdisplay = getattr(dqimport,dqname.split('.')[1]) - dqconfig = dqwebdisplay.dqconfig - timestamp = time.strftime('%Y%m%d%H%M%S') - atlasarea = os.environ['AtlasArea'].split('/') - dqdir = atlasarea[7]+'_'+atlasarea[8]+'_'+atlasarea[9] - handir = dqconfig.hanResultsDir - if config.handir != '': handir = config.handir - if not os.path.isdir(handir): os.mkdir(handir) - webdir = dqconfig.htmlDir - if config.webdir != '': webdir = config.webdir - if not os.path.isdir(webdir): os.mkdir(webdir) - finalwebdir = webdir.replace('wwwtemp','www') - if not os.path.isdir(finalwebdir): os.mkdir(finalwebdir) - webdir=webdir+'/'+timestamp - finalwebdir=finalwebdir+'/'+dqdir+'_'+dqconfig.config - htmldir=(dqconfig.htmlWeb.replace('wwwtemp','www'))+'/'+dqdir+'_'+dqconfig.config - if os.path.isdir(webdir): - print "tct_dqwebdisplay.py: removing",webdir - if not config.dryrun: os.system('rm -Rf %s' % webdir) - if os.path.isdir(finalwebdir): - print "tct_dqwebdisplay.py: removing",finalwebdir - if not config.dryrun: os.system('rm -Rf %s' % finalwebdir) - pass - # preparation - if os.path.isfile('MyCOOL_histo.db'): os.system('rm -f MyCOOL_histo.db') - print "tct_dqwebdisplay.py: running tct_CreateDB.py" - if not config.dryrun: os.system('tct_CreateDB.py') # create dummy cool database - print "tct_dqwebdisplay.py: dqconfig.hcfg =",dqconfig.hcfg - if not os.path.isfile(dqconfig.hcfg): - hanconfigfile='/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/tct_reference_run.hcfg' - print "tct_dqwebdisplay.py: Using han configuration fall-back file:",hanconfigfile - os.system('ln -s %s %s' % (hanconfigfile,dqconfig.hcfg)) - pass - if not os.path.isfile(dqconfig.hcfg_min10): - hanconfigfile='/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/tct_reference_minutes10.hcfg' - print "tct_dqwebdisplay.py: Using han configuration fall-back file:",hanconfigfile - os.system('ln -s %s %s' % (hanconfigfile,dqconfig.hcfg_min10)) - pass - if not os.path.isfile(dqconfig.hcfg_min30): - hanconfigfile='/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/tct_reference_minutes30.hcfg' - print "tct_dqwebdisplay.py: 
Using han configuration fall-back file:",hanconfigfile - os.system('ln -s %s %s' % (hanconfigfile,dqconfig.hcfg_min30)) - pass - ## Run command - runcmd = "DQWebDisplay.py %s %s %s" % (config.monitorfile,dqname,timestamp) - print "tct_dqwebdisplay.py: exact command is \"",runcmd,"\"" - print "tct_dqwebdisplay.py: starting DQWebDisplay.py...",datetime.datetime.now() - if not config.dryrun: - os.system(runcmd) - for filename in os.popen('ls %s/%s*/*/*/run_*_han.root' % (handir,timestamp)): - filename = filename.strip('\n') - newname = (filename.split('/'))[len(filename.split('/'))-1] - newlist = newname.split('_') - newname = newname.replace(newlist[1],"") - print "tct_dqwebdisplay.py: copying",filename,"to",newname - print "tct_dqwebdisplay.py: current working dir is",os.getcwd() - shutil.copyfile(filename,newname) - pass - print 'tct_dqwebdisplay.py: moving',webdir,'to',finalwebdir - os.system('mv %sL %s' % (webdir,finalwebdir)) # final webdir reflects atlas project and release - pass - if dqname == 'Tier0ChainTests.ReleaseDQWebDisplayCosmics' and os.path.isfile('run__han.root'): - os.system('mv run__han.root run__han__release.root') - pass - ## make weblink file - os.system('rm -f dqweblink_'+dqconfig.config+'.html') - fdqweblink = open('dqweblink_'+dqconfig.config+'.html','w') - sdqweblink = """ -<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Frameset//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transititional.dtd"> -<html> - <head> - <title>Home | TCT Redirection page</title> - <META http-equiv="refresh" content="1; URL=%s"> - </head> - -<body> -You will be redirected automatically in 1 second to the <a href="%s">tct DQ webpage</a>. -</body> -</html> -EOF -""" - fdqweblink.write(sdqweblink % (htmldir,htmldir)) - fdqweblink.close() - print "tct_dqwebdisplay.py: ...ending",dqname,datetime.datetime.now() - return - -# -# Main job -# -print "tct_dqwebdisplay.py: starting",datetime.datetime.now() -workdir = rttdir = os.getcwd() -if "WORKDIR" in os.environ: workdir = os.environ["WORKDIR"] -print "tct_dqwebdisplay.py: working directory is",workdir -print "tct_dqwebdisplay.py: rttdir directory is",rttdir -os.chdir(workdir) -config = parseCmdLine(sys.argv[1:]) -hanlist = config.hanconfig.split(',') -hanshortlist = [] -# Copy han configuration files in workdir -for hanfile in hanlist: - hanshort = (hanfile.split('/'))[len(hanfile.split('/'))-1] - hanshortlist.append(hanshort) - if rttdir != workdir: shutil.copyfile(hanfile,workdir+'/'+hanshort) - pass -# Loop over different dq configurations -for dqname in config.dqconfig.split(','): - rundqdisplayjob(dqname,config,hanshortlist) - pass -# Loop over the different hcfg files and update Han configuration -print "tct_dqwebdisplay.py: starting Han replace running...",datetime.datetime.now() -runnumber = os.environ['RUNNUMBER'] -cmd = 'tct_dqupdatereferences.sh %s %s %s %s' -for hanshort in hanshortlist: - hanshortlatest = hanshort.replace('_prev','_') - mycmd = cmd % (hanshortlatest,config.monitorfile,hanshort,runnumber) - print "tct_dqwebdisplay.py: exact command is \"",mycmd,"\"" - if not config.dryrun: os.system(mycmd) - pass -print "tct_dqwebdisplay.py: ...ending Han replace running",datetime.datetime.now() -# Copy output files back on afs rttdir -os.chdir(rttdir) -if rttdir != workdir: - print "\ntct_dqwebdisplay.py: start copying output files to rttdir...",rttdir,datetime.datetime.now() - print "tct_dqwebdisplay.py: rttdir =",rttdir - for filename in os.listdir(workdir): - print "tct_dqwebdisplay.py: copying",filename - if not config.dryrun: 
shutil.copyfile(workdir+'/'+filename,rttdir+'/'+filename) - pass - print "tct_dqwebdisplay.py: ...output files copying to rttdir done ",datetime.datetime.now() - pass -print "ErrorCode=0 (OK)" -print "\ntct_dqwebdisplay.py: ending",datetime.datetime.now() diff --git a/Tools/Tier0ChainTests/scripts/tct_dqwebdisplay.sh b/Tools/Tier0ChainTests/scripts/tct_dqwebdisplay.sh deleted file mode 100755 index 439a92f16567e068cd65c86e7c203ebe14b1a0e7..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_dqwebdisplay.sh +++ /dev/null @@ -1,188 +0,0 @@ -#!/bin/sh - -## Check arguments -if [ $# -lt 1 ] ; then - echo "Usage: tct_dqwebdisplay.sh <monitorfile> [<dqconfig>] [<hanrunconfig>] [<han10mconfig>] [<han30mconfig>] [<handir>] [<wwwdir>]" - exit 1 -fi - -## intercept dryrun option if given -DRYRUN=0 -if [ "$1" = "-d" ] ; then - DRYRUN=1 - shift -fi - -## Check existence of template files -if [ ! -f $1 ] ; then - echo "ERROR: Cannot find monitoring file " $1 - exit 1 -fi - -## setup configuration -monitorfile=$1 -display="Tier0ChainTests.LocalDQWebDisplayCosmics" -if [ $# -gt 1 ] ; then - display=$2 -fi -dqconfig=`tct_dqutil.py ${display} dqconfig.config` - -## han configuration files -defhanrun=`tct_dqutil.py ${display} dqconfig.hcfg` -hanrun=$defhanrun -if [ $# -gt 2 ] ; then - if [ ! -f $3 ] ; then - echo "ERROR: Cannot find han configuration file $3. Exit." - exit 1 - fi - hanrun=$3 -fi -defhan10m=`tct_dqutil.py ${display} dqconfig.hcfg_min10` -han10m=$defhan10m -if [ $# -gt 3 ] ; then - if [ ! -f $4 ] ; then - echo "ERROR: Cannot find han configuration file $4. Exit." - exit 1 - fi - han10m=$4 -fi -defhan30m=`tct_dqutil.py ${display} dqconfig.hcfg_min30` -han30m=$defhan30m -if [ $# -gt 4 ] ; then - if [ ! -f $5 ] ; then - echo "ERROR: Cannot find han configuration file $5. Exit." - exit 1 - fi - han30m=$5 -fi - -## han results directory -defhandir=`tct_dqutil.py ${display} dqconfig.hanResultsDir` -handir=$defhandir -if [ $# -gt 5 ] ; then - if [ ! -d $6 ]; then - echo "ERROR: Cannot find directory $6. Exit." - exit 1 - fi - handir=$6 -fi -if [ ! -d $defhandir ]; then - if [ ! $handir = $defhandir ]; then - ln -s $handir $defhandir - else - mkdir -p $defhandir - fi -fi - -## dq web directory -defwebdir=`tct_dqutil.py ${display} dqconfig.htmlDir` -webdirtemp=$defwebdir -if [ $# -gt 6 ] ; then - if [ ! -d $7 ]; then - echo "ERROR: Cannot find directory $7. Exit." - exit 1 - fi - webdirtemp=$7 -fi -if [ ! -d $defwebdir ]; then - if [ ! $webdirtemp = $defwebdir ]; then - ln -s $webdirtemp $defwebdir - else - mkdir -p $defwebdir - fi -fi - -version=$(date +%u) -if [ $version = 7 ]; then - version=0 -fi -timestamp=$(date "+%Y%m%d%H%M%S") -dqdir=`echo $AtlasArea | cut -d "/" -f 8,9,10 | gawk '{ gsub("/","_") ; print $0}'` -finalwebdir=`echo $webdirtemp | gawk '{ gsub("wwwtemp","www") ; print $0}'` -mkdir -p $finalwebdir - -webdirtemp=${webdirtemp}/${timestamp} -finalwebdir="${finalwebdir}/${dqdir}_${dqconfig}" -htmldir="http://atlas-project-fullchaintest.web.cern.ch/atlas-project-FullChainTest/tier0/dqm/www/${dqdir}_${dqconfig}" - -## create dqwebdisplay - -if [ -f MyCOOL_histo.db ] ; then - rm -f MyCOOL_histo.db -fi - -if [ -d $webdirtemp ] ; then - rm -Rf $webdirtemp -fi -if [ -d $finalwebdir ] ; then - rm -Rf $finalwebdir -fi - -## preparation -tct_CreateDB.py # create dummy cool database -echo # fall-back han configuration files -if [ ! -f $defhanrun ]; then - if [ ! 
$hanrun = $defhanrun ]; then - ln -s $hanrun $defhanrun - else - hanconfigfile=/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/tct_reference_run.hcfg - echo "WARNING: Using han configuration fall-back file: ${hanconfigfile}" - ln -s $hanconfigfile $defhanrun - fi -fi -if [ ! -f $defhan10m ]; then - if [ ! $han10m = $defhan10m ]; then - ln -s $han10m $defhan10m - else - hanconfigfile=/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/tct_reference_minutes10.hcfg - echo "WARNING: Using han configuration fall-back file: ${hanconfigfile}" - ln -s $hanconfigfile $defhan10m - fi -fi -if [ ! -f $defhan30m ]; then - if [ ! $han30m = $defhan30m ]; then - ln -s $han30m $defhan30m - else - hanconfigfile=/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/tct_reference_minutes30.hcfg - echo "WARNING: Using han configuration fall-back file: ${hanconfigfile}" - ln -s $hanconfigfile $defhan30m - fi -fi - -## run dqwebdisplay -RUNCMD="DQWebDisplay.py $monitorfile $display $timestamp" -echo -echo ">> Now running command:" -echo ">> ====================" -echo "$RUNCMD" -echo -if [ $DRYRUN -ne 1 ]; then - $RUNCMD - cp -f ${handir}/${timestamp}/*/*/*han.root ./ ; tct_fixhanrootnames.py - mv $webdirtemp $finalwebdir # final webdir reflects atlas project and release -fi -echo -echo ">> Finished command:" -echo ">> =================" -echo "$RUNCMD" -echo - -## make weblink file -rm -f dqweblink.html -cat > dqweblink.html <<EOF -<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Frameset//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transititional.dtd"> -<html> - <head> - <title>Home | TCT Redirection page</title> - <META http-equiv="refresh" content="1; URL=$htmldir"> - </head> - -<body> -You will be redirected automatically in 1 second to the <a href="$htmldir">tct DQ webpage</a>. 
-</body> -</html> -EOF - -## this should only be reached if all successful ;-) -echo "ErrorCode=0 (OK)" - diff --git a/Tools/Tier0ChainTests/scripts/tct_findOverwrite.py b/Tools/Tier0ChainTests/scripts/tct_findOverwrite.py deleted file mode 100755 index 584168a52f60ecf44051729f0063a1e77b1e446b..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_findOverwrite.py +++ /dev/null @@ -1,93 +0,0 @@ -#!/bin/env python - -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration -# Susumu Oda (2011/07/28) - -import os,sys -if len(sys.argv)<9 or sys.argv[1]=="-h" or sys.argv[1]=="--help": - print "Usage: tct_findOverwrite.py <ESD w/HIST> <AOD w/ HIST> <TAG w/ HIST> <ESD w/o HIST> <AOD w/o HIST> <TAG w/o HIST> <TAG from ESD> <test name>" - print " Example: tct_findOverwrite.py myESD_Egamma_0.pool.root myAOD_Egamma_0.AOD.pool.root myTAG_Egamma_0.root myESD_Egamma_1.pool.root myAOD_Egamma_1.AOD.pool.root myTAG_Egamma_1.root myTAG_Egamma_2.root Run00184169_Egamma_Collisions_Comparison" - sys.exit(-1) - -from PROCTools.compareTCTs import * - -irrelevantContainers = ['RecoTimingObj_p1_RAWtoESD_timings', 'RecoTimingObj_p1_ESDtoAOD_timings', 'EventBookkeeperCollection_p2_EventBookkeepers', 'EventBookkeeperCollection_p2_IncompleteEventBookkeepers', 'IOVMetaDataContainer_p1__TagInfo', 'EventFormat', '_TagInfo', 'xAOD::EventFormat_v1_EventFormat'] - -ESDwHIST = sys.argv[1] -if not os.path.exists(ESDwHIST): - print "%s doesn't exist" % ESDwHIST - ESDwHIST = None - -AODwHIST = sys.argv[2] -if not os.path.exists(AODwHIST): - print "%s doesn't exist" % AODwHIST - AODwHIST = None - -TAGwHIST = sys.argv[3] -if not os.path.exists(TAGwHIST): - print "%s doesn't exist" % TAGwHIST - TAGwHIST = None - -ESDwoHIST = sys.argv[4] -if not os.path.exists(ESDwoHIST): - print "%s doesn't exist" % ESDwoHIST - ESDwoHIST = None - -AODwoHIST = sys.argv[5] -if not os.path.exists(AODwoHIST): - print "%s doesn't exist" % AODwoHIST - AODwoHIST = None - -TAGwoHIST = sys.argv[6] -if not os.path.exists(TAGwoHIST): - print "%s doesn't exist" % TAGwoHIST - TAGwoHIST = None - -TAGfromESD = sys.argv[7] -if not os.path.exists(TAGfromESD): - print "%s doesn't exist" % TAGfromESD - TAGfromESD = None - -testName = sys.argv[8] -sumFile=open(testName+".txt","w") -sumFile.write(testName) - -details = None -identical = [False, False, False, False] -compName = ["Comparison #1: TAG with HIST vs TAG without HIST", - "Comparison #2: TAG from AOD vs TAG from ESD ", - "Comparison #3: ESD with HIST vs ESD without HIST", - "Comparison #4: AOD with HIST vs AOD without HIST"] - -if (TAGwHIST!=None and TAGwoHIST!=None): - print "\nComparison #1: TAG with HIST [%s] vs TAG without HIST [%s]" % (TAGwHIST,TAGwoHIST) - (eq,dif) = compareTreeFiles(TAGwHIST,TAGwoHIST,details) - identical[0] = (dif==0) - -if (TAGwoHIST!=None and TAGfromESD!=None): - print "\nComparison #2: TAG from AOD [%s] vs TAG from ESD [%s]" % (TAGwoHIST,TAGfromESD) - (eq,dif) = compareTreeFiles(TAGwoHIST,TAGfromESD,details) - identical[1] = (dif==0) - -if (ESDwHIST!=None and ESDwoHIST!=None): - print "\nComparison #3: ESD with HIST [%s] vs ESD without HIST [%s]" % (ESDwHIST,ESDwoHIST) - identical[2] = not diffPoolFiles(ESDwHIST,ESDwoHIST,details,toIgnore=irrelevantContainers) - -if (AODwHIST!=None and AODwoHIST!=None): - print "\nComparison #4: AOD with HIST [%s] vs AOD without HIST [%s]" % (AODwHIST,AODwoHIST) - identical[3] = not diffPoolFiles(AODwHIST,AODwoHIST,details,toIgnore=irrelevantContainers) - -if (identical[0] and identical[1] and 
identical[2] and identical[3]): - print "\nResult: All comparisons are OK." -else: - print "\nResult: At least one comparison is NOT OK:" - for i in range(4): - if not identical[i]: print " %s is NOT OK" % (compName[i]) - -if (identical[0] and identical[1] and identical[2] and identical[3]): sumFile.write(" [OK]\n") -else: sumFile.write(" [ERR]\n") -for i in range(4): - sumFile.write(" "+compName[i]+" is ") - if not identical[i]: sumFile.write("NOT OK\n") - else: sumFile.write(" OK\n") -sumFile.close() diff --git a/Tools/Tier0ChainTests/scripts/tct_finishedMail.py b/Tools/Tier0ChainTests/scripts/tct_finishedMail.py deleted file mode 100755 index ab3b95b8d0c8ab50f4bc33483a393a19600b6e4c..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_finishedMail.py +++ /dev/null @@ -1,147 +0,0 @@ -#!/usr/bin/env python - -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration -# -# tct_finishedMail.py: utilities for sending notification emails -# about the results from the tests in the Tier0ChainTests -# - -import sys, string, os, smtplib -from get_release_2017 import (fn2dt, _get_release) - -def sendSms(msg,toAdd='0041762725254@sms.switch.ch'): - yourCernMailAdd = 'rtt@mail.cern.ch' - cernSMTPsrv='cernmx.cern.ch' - s=smtplib.SMTP(cernSMTPsrv) - toList=[toAdd] - s.sendmail(yourCernMailAdd,toList,msg) - print "<br>%s<br>A notification has been sent to the admin<br>" % msg - s.quit() - return - -def sendMail(recipient, message): - s = smtplib.SMTP('cernmx.cern.ch') - s.sendmail('rtt@mail.cern.ch', recipient, message) - s.quit() - return - -## main -if __name__ == "__main__": - - usageline = "usage: " + sys.argv[0] + " <emailaddress1,emailaddress2,emailaddress3,etc> summaryESDTAGcomp.txt Run00184169_Egamma_Collisions_Comparison.txt,Run00183021_MinBias0_Collisions_Comparison.txt" - if (len(sys.argv)<2): - print usageline - sys.exit(1) - - project = os.environ["AtlasProject"] - nightly = os.environ["AtlasBuildStamp"] - branch = os.environ["AtlasBuildBranch"] - cmtconfig = os.environ['CMTCONFIG'] - relNumber = _get_release(fn2dt(nightly)) - release = "%s %s %s" % (nightly, branch, project) - - emailAddresses = sys.argv[1] - - resultsurl = "https://atlas-rtt.cern.ch/index.php?q=%%28release=%s;packagename=Tier0ChainTests;branch=%s;cmtconfig=%s;project=%s;verbosity=vvv;%%29" % ( relNumber, branch, cmtconfig, project ) - comparisonlogurl = "https://atlas-rtt.cern.ch/prod/rtt/%s/%s/build/%s/%s/Tier0ChainTests/ESDTAGCOMM_comparison/ESDTAGCOMM_comparison_log" % ( relNumber, branch, cmtconfig, project ) - - msgbody = """<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> -<body> -<h1></h1> -<p style=\"font-family: monospace;\"> -The results from the Tier0ChainTests are available <a href=\"%s\">here</a> -<br><br> -(Note that results may be published on the web with a slight delay) -</p> -""" % resultsurl - - if len(sys.argv) >= 3: - filesNames = sys.argv[2] - filesList = filesNames.split(',') - for fileName in filesList: - if not os.path.exists(fileName): continue - if 'ESDTAGcomp' in fileName: - fSummary = open(fileName,'r') - testSummaryString = """ -<p style=\"font-family: monospace;\"> -==========================================<br> -= Summary of comparisons (ESD, AOD, TAG) =<br> -==========================================<br> -<br> -(Full log available <a href=\"%s\">here</a>) -</p> -<pre>""" % comparisonlogurl - - lines = [line for line in fSummary] - lines.sort() - for line in lines: - testSummaryString += line - pass - testSummaryString += "</pre>" - 
continue - # the rest of the files, if any - fSummary = open(fileName,'r') - lSummary = fSummary.readlines() - isSuccess = False - for line in lSummary: - if 'TestCode=SUCCESS' in line: isSuccess = True - pass - fileNameList = fileName.split('/') - shortFileName = fileNameList[len(fileNameList)-1] - if isSuccess: - testSummaryString += ("%s = OK\n" % (shortFileName.split('.'))[0]) - else: - testSummaryString += ("%s = NOT OK\n" % (shortFileName.split('.'))[0]) - pass - pass - pass - - msgbody += testSummaryString - - overwriteCheckString = """ -<pre> - -=================================================================== -= Summary of comparisons of TAGs made with and without monitoring = -=================================================================== - -""" - if len(sys.argv) >= 4: - filesNames = sys.argv[3] - filesList = filesNames.split(',') - for fileName in filesList: - if not os.path.exists(fileName): continue - fOverwrite = open(fileName, 'r') - for line in fOverwrite: - overwriteCheckString += line - - overwriteCheckString += "</pre>\n" - msgbody += overwriteCheckString - - tctStatusString = "<pre><br>" - os.system('tct_getstatus.py --release=%s --day=%s --dq --dump' % ( branch, relNumber ) ) - fstatus = open('tctstatus.txt','r') - for line in fstatus: - - tctStatusString += line - - tctStatusString += "</pre>" - msgbody += tctStatusString - msgbody += "</html>" - - ## send emails - print "Sending email(s) to: " + emailAddresses - msg = "From: rtt@mail.cern.ch\nSubject: Finished TCT for nightly: %s\nTo: %s\nContent-Type: text/html\n\n%s\n" % (release, emailAddresses, msgbody) - - recipients = emailAddresses.split(',') - for recipient in recipients: - print "Will now send notification email to %s with the following contents:" % recipient - print msg - sendMail(recipient, msg) - - ## send text messages - #sendSms("From: rtt@mail.cern.ch\nSubject: test\n") - - print "(the line below is to trick the RTT framework that this \"Athena\" job finished successfully)\n" - print "ErrorCode=0 (OK)" diff --git a/Tools/Tier0ChainTests/scripts/tct_fixhanrootnames.py b/Tools/Tier0ChainTests/scripts/tct_fixhanrootnames.py deleted file mode 100755 index 1c6ba04f81545aa85bea0add496adf6398586c09..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_fixhanrootnames.py +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env python - -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -import sys -import os, os.path - -files=os.listdir(".") - -for file in files: - if not file.endswith("han.root"): continue - splitArr = file.split("_") - newfile = file.replace(splitArr[1],"") - os.system("mv %s %s" % (file,newfile)) - diff --git a/Tools/Tier0ChainTests/scripts/tct_generateXML.py b/Tools/Tier0ChainTests/scripts/tct_generateXML.py deleted file mode 100644 index d8a6472b87df3cb52411ca0d7599197a47f16bf0..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_generateXML.py +++ /dev/null @@ -1,407 +0,0 @@ -#!/usr/bin/env python - -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -import os,xml.dom.minidom - -class tct_generateXML: - def __init__(self): - self.castorloc="/castor/cern.ch/grid/atlas/DAQ/2009/" - self.fallbackloc="/afs/cern.ch/atlas/offline/external/FullChainTest/tier0-vol3/rtt/fallbackfiles/" - self.runnums=["00135664","00135492","Latest"] - self.lateststreams = ['physics_CosmicMuons'] - self.nevent=500 - self.amitag="f160" - self.nparjobs=3 - 
self.mergeCommands={'MergeJobIDComm':{'run':'00135664','streams':['IDCosmic'],'type':'IDCOMM'}, - 'MergeJobAODandTAG':{'run':'00135664','streams':['IDCosmic','L1Calo'],'type':'AODandTAG'}} - self.dqwebdisplays={'StaticComparison':{'config':'Tier0ChainTests.TCTDQWebDisplayCosmics', - 'input':['cosmics_run.latest.hcfg', - 'cosmics_minutes10.latest.hcfg', - 'cosmics_minutes30.latest.hcfg']}, - 'HanFromReleaseComparison':{'config':'Tier0ChainTests.ReleaseDQWebDisplayCosmics', - 'output':'run__han__release.root'}, - 'ComparisonWithYesterday':{'config':'Tier0ChainTests.DynamicDQWebDisplayCosmics', - 'input':['cosmics_prevrun.latest.hcfg', - 'cosmics_prevminutes10.latest.hcfg', - 'cosmics_prevminutes30.latest.hcfg'], - 'output':'run__han.root'}, - } - self.dqfallbackfiles=['/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/Cosmics/cosmics_run.191526.hcfg', - '/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/Cosmics/cosmics_minutes10.1.12.hcfg', - '/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/Cosmics/cosmics_minutes30.1.9.hcfg'] - self.mydom=self.createCore() - - - def generateRecoJobs(self): - # Add list of reconstruction jobs as a parallel chain - recoel = self.mydom.createElement("parallel") - self.mydom.getElementsByTagName("sequential")[0].appendChild(recoel) - for runnum in self.runnums: - for stream in self.outputStreams(runnum): - for seq in range(1,self.nparjobs): - self.createRecoCE(runnum,stream,seq) - - def generateMergeJobs(self): - # Add list of reconstruction jobs as a parallel chain - self.allowedMergeTypes=['AODandTAG','IDCOMM'] - mergeel = self.mydom.createElement("parallel") - self.mydom.getElementsByTagName("sequential")[0].appendChild(mergeel) - print self.mergeCommands - for mergeCommand in self.mergeCommands: - self.createMergeCE(mergeCommand) - - - def generateDQWebDisplays(self): - # Add list of data quality job displays as a parallel chain - dqwebel = self.mydom.createElement("parallel") - self.mydom.getElementsByTagName("sequential")[0].appendChild(dqwebel) - for dqwebname in self.dqwebdisplays: - self.createDQWebCE(dqwebname,self.dqwebdisplays[dqwebname]) - - - def outputStreams(self,runnum): - inputloc=self.castorloc+runnum - if runnum == "Latest": - return self.lateststreams - OutStr=[] - streams = os.popen("nsls %s" % inputloc).readlines() - if len(streams) == 0: - print "Run "+runnum+" not found on castor" - return OutStr - for stream in streams: - if 'physics' in stream: - stream = stream.strip('\n').lstrip('physics_') - OutStr.append(stream) - return OutStr - - def createRecChainFileOut(self,stream,seq): - filelist=[] - filelist = filelist+["job_"+stream+"_"+str(seq)+".pool.root"] - if stream == "IDCosmic": - filelist = filelist+["myMergedMonitoring_"+stream+"_"+str(seq)+".root"] - return filelist - - def createCore(self): - # Create XML file header - domCore=xml.dom.minidom.parseString(self.xmlCoreString()) - return domCore - - def createRecoCE(self,runnum,stream,seq): - # This is the template for the job definition - doc1=xml.dom.minidom.parseString(self.xmlRecoString()) - - # Set the jobTransformJobName - jobNameEl=doc1.getElementsByTagName("jobTransformJobName")[0] - jobNameEl.childNodes[0].data="Run"+runnum+"_"+stream+str(seq) - - # Set the jobTransformSummaryName - jobNameElList=doc1.getElementsByTagName("argvalue") - for iEl,jobNameEl in enumerate(jobNameElList): - if not 'SummaryName' in jobNameEl.childNodes[0].data: continue - jobNameEl.childNodes[0].data="Run"+runnum+"_"+stream+str(seq)+".summary.txt" - break - - # Set the 
jobTransformCmd - cmdEl=doc1.getElementsByTagName("jobTransformCmd")[0] - inputloc=self.castorloc+runnum - if runnum == "Latest": - cmdEl.childNodes[0].data="tct_recotrf2.sh -job "+str(seq)+" -trigstr "+stream+" -nfiles 1 -nevents "+str(self.nevent)+" -castor "+inputloc+" -amitag "+self.amitag+" -castorsubdir" - else: - cmdEl.childNodes[0].data="tct_recotrf2.sh -job "+str(seq)+" -trigstr "+stream+" -nfiles 3 -nevents "+str(self.nevent)+" -runnum "+runnum+" -castor "+inputloc+" -amitag "+self.amitag - pass - - # Get the list of outputfiles to keep - #fileL = self.createRecChainFileOut(stream,seq) - #for fileN in fileL: - # cfo = doc1.createElement("chainfileout") - # cfoText=doc1.createTextNode(fileN) - # cfo.appendChild(cfoText) - # chainEl=doc1.getElementsByTagName("chainElement")[0] - # chainEl.appendChild(cfo) - # pass - - # Insert reco CE into main xml. Make a deep copy - x = self.mydom.importNode(doc1.childNodes[0],True) - self.insertXML(x) - return - - def createMergeCE(self,mc): - print "Merging with command " - print self.mergeCommands[mc] - runnum=self.mergeCommands[mc]['run'] - streams=self.mergeCommands[mc]['streams'] - type=self.mergeCommands[mc]['type'] - # check run number - if runnum not in self.runnums: - print "merging for undefined run requested, return"+runnum - return - #check streams - definedStreams=self.outputStreams(runnum) - for stream in streams: - if stream not in definedStreams: - print "merging for undefined stream requested, run "+runnum+"stream "+stream - return - # check type - if type not in self.allowedMergeTypes: - print "merging for undefined tyoe requested, return"+type - # Make sure the files needed are kept - inputFiles=[] - # This is the list of reco elements, used to find the add point - recEls=self.mydom.getElementsByTagName("jobTransformJobName") - - filetype=type - if type=="AODandTAG": filetype = "AOD" - for stream in streams: - for seq in range(1,self.nparjobs): - inputFileNeeded="Run"+runnum+"_"+stream+str(seq)+"."+filetype+".pool.root" - inputFiles.append(inputFileNeeded) - trfName="Run"+runnum+"_"+stream+str(seq) - recElNeed = None - for recEl in recEls: - if recEl.childNodes[0].data == trfName: - cfo = self.mydom.createElement("chainfileout") - cfoText=self.mydom.createTextNode(inputFileNeeded) - cfo.appendChild(cfoText) - recEl.parentNode.parentNode.appendChild(cfo) - print "I found tag with job name "+trfName - - def createDQWebCE(self,dqwebname,dqwebdict): - # This is the template for the job definition - doc1=xml.dom.minidom.parseString(self.xmlDQWebString()) - - # Set the jobTransformJobName - jobNameEl=doc1.getElementsByTagName("jobTransformJobName")[0] - jobNameEl.childNodes[0].data="DQWebDisplay_"+dqwebname - - # Set the jobTransformSummaryName - jobNameElList=doc1.getElementsByTagName("argvalue") - for iEl,jobNameEl in enumerate(jobNameElList): - if not 'SummaryName' in jobNameEl.childNodes[0].data: continue - jobNameEl.childNodes[0].data="Run"+runnum+"_"+stream+str(seq)+".summary.txt" - break - - # Set the jobTransformCmd - cmdEl=doc1.getElementsByTagName("jobTransformCmd")[0] - inputloc=self.castorloc+runnum - if runnum == "Latest": - cmdEl.childNodes[0].data="tct_recotrf2.sh -job "+str(seq)+" -trigstr "+stream+" -nfiles 1 -nevents "+str(self.nevent)+" -castor "+inputloc+" -amitag "+self.amitag+" -castorsubdir" - else: - cmdEl.childNodes[0].data="tct_recotrf2.sh -job "+str(seq)+" -trigstr "+stream+" -nfiles 3 -nevents "+str(self.nevent)+" -runnum "+runnum+" -castor "+inputloc+" -amitag "+self.amitag - pass - - # Get the list of 
outputfiles to keep - fileL = self.createRecChainFileOut(stream,seq) - for fileN in fileL: - cfo = doc1.createElement("chainfileout") - cfoText=doc1.createTextNode(fileN) - cfo.appendChild(cfoText) - chainEl=doc1.getElementsByTagName("chainElement")[0] - chainEl.appendChild(cfo) - pass - - # Insert reco CE into main xml. Make a deep copy - x = self.mydom.importNode(doc1.childNodes[0],True) - self.insertXML(x) - return - - def insertXML(self,xmlFrag): - # Append the CE to the last parallel sequence - mainel = self.mydom.getElementsByTagName("parallel") - mainel[len(mainel)-1].appendChild(xmlFrag) - - - def WriteXMLFile(self,filename,verbose): - # write explicit XML file - fout = open(filename,'w') - lines = self.mydom.toprettyxml().split('\n') - for i,line in enumerate(lines): - if line.strip('\t') == '' or line.strip(' ') == '': continue # remove empty lines - line = line.replace('\t',' ') - shortline = line.strip() - test1 = False - if i < len(lines) - 2: - compline = lines[i+2].replace('\t','') - compline = compline.strip() - compline = compline.replace('/','') - if shortline == compline: test1 = True - pass - test2 = False - if i > 2: - compline = lines[i-2].replace('\t','') - compline = compline.strip() - compline = compline.replace('<','</') - if shortline == compline: test2 = True - pass - test3 = False - if i > 1: - compline1 = lines[i-1].replace('\t','') - compline1 = compline1.strip() - compline2 = lines[i+1].replace('\t','') - compline2 = compline2.strip() - compline2 = compline2.replace('/','') - if len(compline1) and compline1 == compline2: test3 = True - pass - if test1 or test3: - if test3: line = line.strip() - fout.write(line) - if verbose: - print line, - else: - if test2: line = line.strip() - fout.write(line+'\n') - if verbose: - print line - pass - pass - fout.close() - return - - def xmlCoreString(self): - jttemp="""\ -<?xml version="1.0"?> -<!-- MB: Search for RUNNUMBER to replace the run input collections and setting --> -<!-- MB: xml file validation: http://www.hep.ucl.ac.uk/atlas/AtlasTesting/DTD/onlineValidation/validator.html --> -<!-- <!DOCTYPE unifiedTestConfiguration SYSTEM "https://test-rtt.web.cern.ch/test-rtt/brinick/Results/unifiedTestConfiguration.dtd"> --> <!-- xmltest --> -<!DOCTYPE unifiedTestConfiguration SYSTEM "http://www.hep.ucl.ac.uk/atlas/AtlasTesting/DTD/unifiedTestConfiguration.dtd"> <!-- official --> - -<unifiedTestConfiguration> - -<atn/> -<kv/> -<rtt xmlns="http://www.hep.ucl.ac.uk/atlas/AtlasTesting/rtt"> -<rttContactPerson>Renaud Bruneliere</rttContactPerson> -<refRelease>14.5.0</refRelease> - -<jobList> -<chain> -<chainName>Tier0ChainJob</chainName> <!-- Only letters allowed, no white space or punctuation --> -<sequential> - -<!-- Get latest AMI tag configuration --> -<chainElement> -<jobTransform> -<doc>getAMITag</doc> -<jobTransformJobName>DummyJob_FirstRTTjobSometimesFails</jobTransformJobName> -<jobTransformCmd> -tct_getAmiTag.py f160 ami_recotrf.cmdargs -</jobTransformCmd> -<group>TCTTransform</group> -<queue>short</queue> -<batchWallTime>15</batchWallTime> -</jobTransform> -<chainfileout>ami_recotrf.cmdargs</chainfileout> -</chainElement> - -</sequential> -</chain> -</jobList> - -<jobGroups> - -<jobGroup name="TCTTransform" parent="Transform"> -<keepFilePattern>*.root</keepFilePattern> -<keepFilePattern>*.pdf</keepFilePattern> -<keepFilePattern>*.log</keepFilePattern> -<keepFilePattern>*.html</keepFilePattern> -<keepFilePattern>*.summary.txt</keepFilePattern> -<keepFilePattern>*MyCOOL_histo.db</keepFilePattern> 
-<keepFilePattern>*.pickle</keepFilePattern> -<keepFilePattern>*.gpickle</keepFilePattern> -</jobGroup> - -</jobGroups> - -</rtt> -</unifiedTestConfiguration> - """ - return jttemp - - def xmlRecoString(self): - jttemp="""\ - <chainElement> - <jobTransform> <doc>RecoTrf</doc> - <jobTransformJobName> JobName </jobTransformJobName> - <jobTransformCmd> - TrfCommand - </jobTransformCmd> - <group>TCTTransform</group> - <queue>long</queue> - <batchWallTime>420</batchWallTime> - <castorSvcClass>t0atlas</castorSvcClass> - <test> - <modulename>RttLibraryTools</modulename> - <testname>FileGrepper</testname> - <arg> - <argname>inputFile</argname> - <argvalue>*_log</argvalue> - </arg> - <arg> - <argname>outputFile</argname> - <argvalue> SummaryName </argvalue> - </arg> - <arg> - <argname>searchList</argname> - <argvalue>All files copied from castor are ok</argvalue> - </arg> - </test> - </jobTransform> - </chainElement> - """ - return jttemp - - def xmlDQWebString(self): - jttemp="""\ - <chainElement> - <jobTransform> - <doc>DQJob</doc> - <jobTransformJobName>JobName</jobTransformJobName> - <jobTransformCmd> - TrfCommand - </jobTransformCmd> - - <group>TCTTransform</group> - <chaindataset_info> - <jobTransformData/> - <chaindatasetName>myMergedMonitoring_IDCosmic.root</chaindatasetName> - <dataset_info> - <jobTransformData/> - <datasetName>/afs/cern.ch/atlas/offline/external/FullChainTest/tier0-vol3/rtt/fallbackfiles/myMergedMonitoring_IDCosmic_0.root</datasetName> - </dataset_info> - </chaindataset_info> - - <queue>short</queue> - <batchWallTime>60</batchWallTime> - - </jobTransform> - </chainElement> - """ - return jttemp - -x=tct_generateXML() -# Create core of XML file -# mydom = x.createCore() - -# Add list of reconstruction jobs -# recoel = mydom.createElement("parallel") -# mydom.getElementsByTagName("sequential")[0].appendChild(recoel) - -#for runnum in runnums: -# if runnum == "Latest": -# streams = lateststreams -# inputloc=castorloc -# else: -# inputloc=castorloc+runnum -# streams = os.popen("nsls %s" % inputloc).readlines() -# pass -# for stream in x.outputStreams(runnum): -# for seq in range(1,3): -# x.createRecoCE(mydom,runnum,stream,seq) - -x.generateRecoJobs() -x.generateMergeJobs() -# x.generateDQWebDisplays() - -# Write XML file -filename = 'Tier0ChainTests_test_AtlasTier0.xml' -x.WriteXMLFile(filename,False) diff --git a/Tools/Tier0ChainTests/scripts/tct_getAmiTag.py b/Tools/Tier0ChainTests/scripts/tct_getAmiTag.py deleted file mode 100755 index cc7e8fa39d5ec8e53de63ce158f8b812d5ea12c2..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_getAmiTag.py +++ /dev/null @@ -1,123 +0,0 @@ -#! 
/usr/bin/env python - -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -# script for retrieving ami tag and converting it into recotrf arguments -# MB 20090226 -# original : /afs/cern.ch/atlas/www/cgi-bin/t0AMIupdater.cgi - -from datetime import datetime -import cgi, commands, pickle, sys, string, re, os -sys.path.insert(0,'/afs/cern.ch/atlas/project/tzero/prod1/code') -import Eowyn.luxml as luxml - - -def makePickle(argdict,outputpickle): - file = open(outputpickle,'w') - pickle.dump(argdict,file) - file.close() - return - - -def getAMITag(amitag): - amioutput = amitag+".gpickle" - amicmd = 'source /afs/cern.ch/user/a/albrand/public/pyAMI/pyAMI_setup.sh ; ' - amicmd += 'python /afs/cern.ch/user/a/albrand/public/pyAMI/python/commands/nomenclature/amiListConfigurationTag.py -configTag=%s -output=%s' % (amitag, amioutput) - s,o = commands.getstatusoutput(amicmd) - if s: - print "error: %s\nOutput message:\n%s" %(str(s),o) - sys.exit(1) - found = os.path.exists( amioutput ) - return found,amitag,amioutput - - -def getLatestAMITag(idx=90): - tagfound = True - while tagfound: - amitag='f%d' % idx - tagfound,amitag,amioutput = getAMITag(amitag) - if tagfound: - s,o = commands.getstatusoutput("rm -f %s" % amioutput) - idx+=1 - continue; - else: idx-=1 - return getAMITag('f%d'%idx) - - -def amiSummary(amitag,amimap) : - sfile=open(amitag+'.txt','w') - keys=amimap.keys() - keys.sort() - rl=['tag','productionStep','readStatus','writeStatus','description'] - for r in rl : - keys.remove(r) - summaryString = '%s: %s\n' % ('tag',str(amimap['tag'])) - summaryString += '%s: %s\n' % ('productionStep',str(amimap['productionStep'])) - for key in keys : - summaryString += '%s: %s\n' % (key,str(amimap[key])) - summaryString += '%s: %s\n' % ('description',str(amimap['description']).replace('\n',' ')) - sfile.write(summaryString) - sfile.close() - return summaryString - - -def checkConfigMap(amimap): - msg='' - moreInfoDict=luxml.toPy(amimap['moreInfo']) - if moreInfoDict['phconfig'].has_key('conditionsTag') : - if moreInfoDict['phconfig']['conditionsTag'] != str(amimap['ConditionsTag']): - msg+='Conditions tag does not match\n' - #swRelCache=moreInfoDict['tasktransinfo']['trfsetupcmd'].split(' ')[3] - #if swRelCache != str(amimap['SWReleaseCache']): - # msg+='SW release cache does not match\n' - if moreInfoDict['tasktransinfo']['trfpath'] != (str(amimap['transformation'])).split(',')[0]: - msg+='Trf path does not match\n' - return msg - - -## main -if __name__ == "__main__": - - if len(sys.argv) < 2: - print "usage: ", sys.argv[0], "<amitag> [<runargsfile>] [<outputpickle>]" - sys.exit(1) - - recoargsfile='recotrf.cmdargs' - if len(sys.argv) >= 3: - recoargsfile = sys.argv[2] - - outputpickle='recoargdict.pickle' - if len(sys.argv) >= 4: - outputpickle = sys.argv[3] - - from PATJobTransforms.ArgDicTools import GetInfoFromAMI - configmap=GetInfoFromAMI(sys.argv[1])['amiPhysDic'] - - stringToWrite='' - for key in configmap: - if type(configmap[key]) == str: - if 'preExec' in key or 'postExec' in key: - stringToWrite += "%s='%s' " % (key,configmap[key]) - else: - stringToWrite += "%s=%s " % (key,configmap[key]) - pass - elif type(configmap[key]) == int: - stringToWrite += "%s=%d " % (key,configmap[key]) - else: - stringToWrite += "%s" % key - pass - try: - fH=open(recoargsfile,'w') - except: - print "Cannot open recoargs file. Exit." 
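# A hedged porting note, not part of the deleted script: under Python 3,
# pickle payloads must be written in binary mode, so makePickle() above
# would become:
import pickle

def make_pickle(argdict, outputpickle):
    # 'wb', not 'w': pickle.dump() writes bytes in Python 3
    with open(outputpickle, 'wb') as f:
        pickle.dump(argdict, f)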
- sys.exit(2) - fH.write(stringToWrite) - fH.close() - makePickle(configmap,outputpickle) - - ## print summary of configuration - print "\nSummary of amitag <%s>" % sys.argv[1] - print "------------------------" - print ">> Recotrf arguments for amitag <%s> stored in <%s> and <%s>\n" % (sys.argv[1],recoargsfile,outputpickle) - print "ErrorCode=0 (OK)" - diff --git a/Tools/Tier0ChainTests/scripts/tct_getESDTAGcomparison.py b/Tools/Tier0ChainTests/scripts/tct_getESDTAGcomparison.py deleted file mode 100755 index 8b1c1a57203291161cc4776f4c9a0549f7ada8a3..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_getESDTAGcomparison.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env python - -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -import os,sys -from get_release_2017 import (fn2dt, _get_release) - -__author__ = 'Renaud Bruneliere <Renaud.Bruneliere@cern.ch>' -__doc__ = 'Call compareTCTs.py from Walter Lampl available in Tools/PROCTools.' - -## main -if __name__ == "__main__": - - - try: - os.environ["AtlasBuildBranch"] - except KeyError: - print "Please set the environment variable AtlasBuildBranch" - sys.exit(1) - - try: - os.environ["AtlasBuildStamp"] - except KeyError: - print "Please set the environment variable AtlasBuildStamp" - sys.exit(1) - - release = os.environ["AtlasBuildBranch"] - timeStamp = os.environ["AtlasBuildStamp"] - nightly = _get_release(fn2dt(timeStamp)) - - if nightly == 'rel_0': - prevnightly = 'rel_6' - else: - prevnightly = 'rel_%d' % (int(nightly.split('_')[1])-1) - pass - - print ('tct_getESDTAGcomparison.py : compare %s,%s with %s,%s' % (release,prevnightly,release,nightly)) - cmd = 'compareTCTs.py --nightly=%s --rRef=%s --rVal=%s --sum=summaryESDTAGcomp.txt' % (release,prevnightly,nightly) - print "Actual command (useful for reproducing locally):\n\n%s\n" % cmd - stat=os.system(cmd) - if (stat==0): - print "ErrorCode=0 (OK)" - else: - print "ErrorCode=",stat - sys.exit(stat) - pass diff --git a/Tools/Tier0ChainTests/scripts/tct_getESDTAGcomparison_diffroot.py b/Tools/Tier0ChainTests/scripts/tct_getESDTAGcomparison_diffroot.py deleted file mode 100755 index 6e571d00d0b9aaa869c23de67e8f98a9e856ef10..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_getESDTAGcomparison_diffroot.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env python - -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -import os,sys -from get_release_2017 import (fn2dt, _get_release) - -__author__ = 'Renaud Bruneliere <Renaud.Bruneliere@cern.ch>' -__doc__ = 'Call compareTCTs.py from Walter Lampl available in Tools/PROCTools.' 
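The rel_N bookkeeping in the main block below picks the reference nightly as the slot before the one under validation, wrapping rel_0 back around to rel_6. A minimal standalone sketch of that wrap-around logic (the helper name previous_nightly is illustrative, not part of the deleted script):

    def previous_nightly(nightly):
        # Nightlies occupy seven weekday slots named rel_0..rel_6; the
        # comparison reference is the preceding slot, with rel_0 wrapping
        # back to rel_6, exactly as the if/else in the main block encodes.
        idx = int(nightly.split('_')[1])
        return 'rel_%d' % ((idx - 1) % 7)

    assert previous_nightly('rel_0') == 'rel_6'
    assert previous_nightly('rel_4') == 'rel_3'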
- -## main -if __name__ == "__main__": - - - try: - os.environ["AtlasBuildBranch"] - except KeyError: - print "Please set the environment variable AtlasBuildBranch" - sys.exit(1) - - try: - os.environ["AtlasBuildStamp"] - except KeyError: - print "Please set the environment variable AtlasBuildStamp" - sys.exit(1) - - release = os.environ["AtlasBuildBranch"] - timeStamp = os.environ["AtlasBuildStamp"] - nightly = _get_release(fn2dt(timeStamp)) - - if nightly == 'rel_0': - prevnightly = 'rel_6' - else: - prevnightly = 'rel_%d' % (int(nightly.split('_')[1])-1) - pass - - print ('tct_getESDTAGcomparison.py : compare %s,%s with %s,%s' % (release,prevnightly,release,nightly)) - cmd = 'compareTCTs.py --nightly=%s --rRef=%s --rVal=%s --sum=summaryESDTAGcomp.txt --diff-root=True' % (release,prevnightly,nightly) - print "Actual command (useful for reproducing locally):\n\n%s\n" % cmd - stat=os.system(cmd) - if (stat==0): - print "ErrorCode=0 (OK)" - else: - print "ErrorCode=",stat - sys.exit(stat) - pass diff --git a/Tools/Tier0ChainTests/scripts/tct_getstatus.py b/Tools/Tier0ChainTests/scripts/tct_getstatus.py deleted file mode 100755 index f9dd4fc701ffbf317def85450795e3f7304c0da9..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_getstatus.py +++ /dev/null @@ -1,224 +0,0 @@ -#!/usr/bin/env python - -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -__author__ = 'Renaud Bruneliere <Renaud.Bruneliere@cern.ch>' -__doc__ = 'Print tct status. Usage tct_getstatus.py -h' - -import os, sys, datetime, glob -import xml.etree.ElementTree as eTree # HAZ - -tctdir = '/afs/cern.ch/atlas/project/RTT/prod/Results/rtt' -jobstatus = ['\033[31mFailure\033[0m', '\033[32mSuccess\033[0m', '\033[37mN/A\033[m'] -jobstatusNoColor = ['Failure', 'Success', 'N/A'] - -def parseCmdLine(args,itoday): - # Parse options - from optparse import OptionParser - parser = OptionParser() - parser.add_option("-r","--release", dest="release", - help="Which release ? 
(15.5.X.Y,...)", default='15.5.X.Y') - parser.add_option("-d","--day", dest="day", - help="Which release number rel_(0-6) ?", - default=('rel_%d' % itoday)) - parser.add_option("--dq", dest="dq", - help="Dump data quality results", - action='store_true',default=False) - parser.add_option("--dump", dest="dump", - help="Dump results into a text file", - action='store_true',default=False) - (config, args) = parser.parse_args(sys.argv[1:]) - for arg in args: - print "gettctstatus.parseCmdLine(): option ",arg,"is not supported" - pass - return config - -def getJobResults(workdir,config): - # Print out table header - print '%-65s %-14s %-14s %-9s %-9s' % ('Job', 'Start', 'End', 'Test', 'Output') - if config.dump: # Have column headings also show up in textdump/emails - config.file.write('%-65s %-14s %-16s %-9s %-9s\n' % ('--Job--', '--Start--', '--End--', '--Test--', '--Output--')) - # Loop over jobs - jobs = [j for j in os.listdir(workdir)] - jobs.sort() - for job in jobs: - if not os.path.isdir(workdir+'/'+job): continue - if not os.path.isfile('%s/%s/rttjobinfo.xml' % (workdir,job)): continue # check only rtt job folders - if not len(os.listdir(workdir+'/'+job)): - str = '%40s \033[31mFolder not found\033[0m' % (job) - strNoCol = '%40s Folder not found' % (job) - print str - if config.dump: config.file.write(strNoCol+'\n') - continue - jobnum = (os.listdir(workdir+'/'+job))[0] - jobLogPath = '%s/%s/%s_log' % (workdir, job, job) - if os.path.isfile(jobLogPath): - # get handles on the log files for the job itself and the job script - jobLog = open(jobLogPath, "r") - scriptLogPath = '%s/%s/%s_script_log' % (workdir,job,job) - scriptLog = open(scriptLogPath, "r") - # now let's get the start and end times for the job - - startTimeString = "Unknown" - endTimeString = "Unknown" - - for line in scriptLog: - if "Started at" in line: - words = line.split() - startTimeString = "%3s%2s %8s" % (words[3], words[4], words[5]) - elif "Results reported on" in line: - words = line.split() - endTimeString = "%3s%2s %8s" % (words[4], words[5], words[6]) - if startTimeString != "Unknown" and endTimeString != "Unknown": - break - - # check if the batch job completed successfully and expected outputs are okay - # HAZ: Modified this to look at the xml - success = 0 - testSuc = 0 - rootTree = eTree.parse('%s/%s/rttjobinfo.xml' % (workdir, job)).getroot() - success = int(rootTree.find('jobExitCode').text) == 0 - - if success: - nTests = int(rootTree.find('tests').find('ntests' ).text) - nTestsSuc = int(rootTree.find('tests').find('ntestsuccess').text) - if nTests == 0: - testSuc = 2 - else: - testSuc = (nTests == nTestsSuc) - - jobLine = '%-65s %14s - %14s %9s %-9s' % (job,startTimeString,endTimeString,jobstatus[success], jobstatus[testSuc]) - jobLineNoCol = '%-65s %14s - %14s %9s %-9s' % (job,startTimeString,endTimeString,jobstatusNoColor[success], jobstatusNoColor[testSuc]) - - else: - jobLineNoCol = jobLine = '%-65s >>>>>>>>>>>>>>>>> NOT STARTED YET <<<<<<<<<<<<<<<<<' % (job) - pass - print jobLine - if config.dump: - config.file.write(jobLineNoCol+'\n') - pass - return - -def getDQResults(workdir,config): - # Loop over jobs - for job in os.listdir(workdir): - if not job.startswith('DQFlagsSummary_'): continue - jobnum = (os.listdir(workdir+'/'+job))[0] - if not os.path.isfile('%s/%s/%s_log' % (workdir,job,job)): continue - dqtitlestr = "DQ results for %s:" % job.split('_')[1] - dqstruline = '='*len(dqtitlestr) - print "\n",dqtitlestr,'\n',dqstruline,'\n' - if config.dump: 
config.file.write('\n'+dqtitlestr+'\n'+dqstruline+'\n\n') - fdq = open('%s/%s/%s_log' % (workdir,job,job)) - if 'ComparisonWithYesterday' in job: - printYesterdayComparison(fdq,config) - elif 'ComparisonWithRelease' in job: - printReleaseComparison(fdq,config) - else: - print "gettctstatus.getDQResults():",job,"is not defined !" - pass - pass - return - -def printYesterdayComparison(fdq,config): - startanalyze = False - for line in fdq: - if 'top_level' in line: startanalyze = True - if not startanalyze: continue - if not (line.startswith('<top_level>') or line.startswith(' ')): continue - if line.startswith(' '): continue - lline = line.strip('\n').lstrip().split() - if lline[0] == 'under': continue - if 'Green' in lline[1]: - test = '\033[32mGreen\033[0m' - elif 'Red' in lline[1]: - test = '\033[31mRed\033[0m' - elif 'Undefined' in lline[1]: - test = '\033[33mUndefined\033[0m' - pass - strline = '%20s %10s' % (lline[0],test) - strlineNoCol = '%20s %10s' % (lline[0],lline[1]) - print strline - if config.dump: config.file.write(strlineNoCol+'\n') - pass - return - -def printReleaseComparison(fdq,config): - startanalyze = False - for line in fdq: - if 'Difference in dq flag lists between today and yesterday' in line: - startanalyze = True - continue - if not startanalyze: continue - #print line - llist = line.split('--- >') - for i in range(len(llist)-1): - l1 = llist[i].strip().lstrip().split() - l2 = llist[i+1].strip().lstrip().split() - iitem = 0 - for i,item in enumerate(l1): - if item != '<': continue - itemname = 'ERROR' - if i+1 < len(l1): itemname = l1[i+1] - itemflag1 = 'ERROR' - if i+2 < len(l1): itemflag1 = l1[i+2] - itemflag2 = 'ERROR' - if 4*iitem+1 < len(l2): itemflag2 = l2[4*iitem+1] - iitem = iitem + 1 - if 'Green' in itemflag1: - test1 = '\033[32mGreen\033[0m' - elif 'Red' in itemflag1: - test1 = '\033[31mRed\033[0m' - elif 'Undefined' in itemflag1: - test1 = '\033[33mUndefined\033[0m' - elif 'Yellow' in itemflag1: - test1 = '\033[33mYellow\033[0m' - elif 'Disabled' in itemflag1: - test1 = '\033[34mDisabled\033[0m' - pass - if 'Green' in itemflag2: - test2 = '\033[32mGreen\033[0m' - elif 'Red' in itemflag2: - test2 = '\033[31mRed\033[0m' - elif 'Undefined' in itemflag2: - test2 = '\033[33mUndefined\033[0m' - elif 'Yellow' in itemflag2: - test2 = '\033[33mYellow\033[0m' - elif 'Disabled' in itemflag2: - test2 = '\033[34mDisabled\033[0m' - pass - if test1 == test2: continue - strline = '%40s %20s -> %20s' % (itemname,test1,test2) - strlineNoCol = '%40s %10s -> %10s' % (itemname,itemflag1.strip().lstrip(),itemflag2.strip().lstrip()) - print strline - if config.dump: config.file.write(strlineNoCol+'\n') - pass - pass - break - if not startanalyze: - print " No difference found" - if config.dump: config.file.write(' No difference found\n') - pass - return - -def main(): - itoday = datetime.date.today().isoweekday() - if itoday == 7: itoday = 0 - config = parseCmdLine(sys.argv[1:],itoday) - if config.dump: config.file = open('tctstatus.txt','w') - title = "Summary for %s %s:" % (config.release,config.day) - titleuline = "="*len(title) - print "\n",title,"\n",titleuline,"\n" - if config.dump: config.file.write("\n"+title+"\n"+titleuline+"\n\n") - workdir = tctdir+'/'+config.day+'/'+config.release+'/build' - workdir = workdir+'/'+(os.listdir(workdir))[0] - workdir = workdir+'/'+(os.listdir(workdir))[0] - workdir = workdir+'/Tier0ChainTests' - getJobResults(workdir,config) - if config.dq: getDQResults(workdir,config) - if config.dump: config.file.close() - return - -if __name__ == 
'__main__': - main() - pass diff --git a/Tools/Tier0ChainTests/scripts/tct_mergeMonHistograms.sh b/Tools/Tier0ChainTests/scripts/tct_mergeMonHistograms.sh deleted file mode 100755 index 55e3efe56ffc7d56fb7078bff458e69bbe5266e0..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_mergeMonHistograms.sh +++ /dev/null @@ -1,59 +0,0 @@ -#!/bin/sh - -## Check arguments -if [ $# -lt 2 ] ; then - echo "Usage: tct_mergeMonHistograms.sh <outputfile> <inputfile1> [<inputfile2,3,4,etc>]" - exit 1 -fi - -## intercept dryrun option if given -DRYRUN=0 -if [ "$1" = "-d" ] ; then - DRYRUN=1 - shift -fi - -## setup configuration -file_histMergeLog=hist_merge.log -file_histMergeList=hist_merge_list.txt -file_histOut=$1 -shift - -rm -f $file_histMergeLog $file_histMergeList $file_histOut -touch $file_histMergeList - -echo -while [ $# -gt 0 ] ; -do - file_histIn=$1 - shift - if [ -f $file_histIn ]; then - echo "Adding to merge list: " $file_histIn - echo $file_histIn >> $file_histMergeList - else - echo "Input histogram file not found: <" $file_histIn ">. Skipped." - fi -done - -## do the merging -RUNCMD="DQHistogramMerge.py $file_histMergeList $file_histOut True" - -echo -echo ">> Now running command:" -echo ">> ====================" -echo "$RUNCMD" -echo -if [ $DRYRUN -ne 1 ]; then - echo "(Now merging histogram(s). This may take some time.)" - $RUNCMD - #$RUNCMD 1>$file_histMergeLog 2>&1 -fi -echo -echo ">> Finished command:" -echo ">> =================" -echo "$RUNCMD" -echo - -## temporary patch, to be fixed in DQHistogramMerge.py -echo "ErrorCode=0 (OK)" - diff --git a/Tools/Tier0ChainTests/scripts/tct_mergeValNtup.sh b/Tools/Tier0ChainTests/scripts/tct_mergeValNtup.sh deleted file mode 100755 index 0201ce50e17a6c4f9c227f907ccfd433b2223b00..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_mergeValNtup.sh +++ /dev/null @@ -1,76 +0,0 @@ -#!/bin/sh - -## Check arguments -if [ $# -lt 2 ] ; then - echo "Usage: tct_mergeValNtup.sh <outputfile> <inputfile1> [<inputfile2,3,4,etc>]" - exit 1 -fi - -## intercept dryrun option if given -DRYRUN=0 -if [ "$1" = "-d" ] ; then - DRYRUN=1 - shift -fi - -## setup configuration -file_valNtupMergeLog=valNtup_merge.log -file_valNtupMergeList=valNtup_merge_list.txt -file_valNtupOut=$1 -shift - -rm -f $file_valNtupMergeLog $file_valNtupMergeList $file_valNtupOut -touch $file_valNtupMergeList - -echo -while [ $# -gt 0 ] ; -do - file_valNtupIn=$1 - shift - if [ -f $file_valNtupIn ]; then - echo "Adding to merge list: " $file_valNtupIn - echo $file_valNtupIn >> $file_valNtupMergeList - else - echo "Input ntuple file not found: <" $file_valNtupIn ">. Skipped." - fi -done - -## build pickle input file -BLDCMD="tct_buildPickleFile.py valNtup.pkl $file_valNtupMergeList $file_valNtupOut" - -echo -echo ">> Now building pickle file:" -echo ">> =========================" -echo "$BLDCMD" -echo -if [ $DRYRUN -ne 1 ]; then - $BLDCMD -fi -echo -echo ">> Finished command:" -echo ">> =================" -echo "$BLDCMD" -echo - -## do the merging -RUNCMD="/afs/cern.ch/atlas/tzero/prod1/inst/projects/data11/trfs/ROOTMerge_trf.py --argdict=valNtup.pkl" - -echo -echo ">> Now running command:" -echo ">> ====================" -echo "$RUNCMD" -echo -if [ $DRYRUN -ne 1 ]; then - echo "(Now merging ntuple(s). 
This may take some time.)" - $RUNCMD - #$RUNCMD 1>$file_valNtupMergeLog 2>&1 -fi -echo -echo ">> Finished command:" -echo ">> =================" -echo "$RUNCMD" -echo - -## temporary patch, to be fixed in DQHistogramMerge.py -echo "ErrorCode=0 (OK)" - diff --git a/Tools/Tier0ChainTests/scripts/tct_readxmlcmds.py b/Tools/Tier0ChainTests/scripts/tct_readxmlcmds.py deleted file mode 100755 index 0d7c706bd340c422b0424423dccc3eb890ef39e1..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_readxmlcmds.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env python - -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -import sys -import os, os.path - -## main -if __name__ == "__main__": - - if len(sys.argv) < 2: - print "usage: ", sys.argv[0], "<xmlfile>" - sys.exit(1) - - xmlfile=sys.argv[1] - if not os.path.exists( xmlfile ): - print "Cannot find file <%s>. Exit." % xmlfile - sys.exit(1) - - cmdArr = [] - cmdfound = False - lineArr = open(xmlfile,"r").readlines() - - for line in lineArr: - ## skip empty lines - line = line.strip() - if len(line)==0: continue - - if line.find("/jobTransformCmd")>0: - cmdfound = False - continue - if cmdfound: - arr = line.split("<!--") - cmdArr.append(arr[0]) - cmdfound = False - continue - if line.find("jobTransformCmd")>0: - cmdfound = True - continue - - print "## runcmds found in <%s> : \n" % xmlfile - for cmd in cmdArr: - print "##" - print cmd - print "" - diff --git a/Tools/Tier0ChainTests/scripts/tct_recotrf.py b/Tools/Tier0ChainTests/scripts/tct_recotrf.py deleted file mode 100755 index 20436b27ed22906b704b06085b5b70099e694375..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_recotrf.py +++ /dev/null @@ -1,456 +0,0 @@ -#!/usr/bin/env python - -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -import os,sys,subprocess,datetime,time,shutil - -__author__ = "Renaud Bruneliere <Renaud.Bruneliere@cern.ch>" -__doc__ = """Python script used by tct to run the Reco_Trf.py transform""" - -castortct = '/eos/atlas/atlascerngroupdisk/proj-sit/tct/rec_input' -#castortct = '/castor/cern.ch/atlas/atlascerngroupdisk/proj-sit/tct/rec_input' -tct_stage_svcclass = 'atlasgroupdisk' -fallbackfiles = {'IDCosmic':['/afs/cern.ch/atlas/offline/external/FullChainTest/tier0-vol3/rtt/fallbackfiles/data09_cos.00135664.physics_IDCosmic.daq.RAW._lb0000._SFO-2._0001.data','/afs/cern.ch/atlas/offline/external/FullChainTest/tier0-vol3/rtt/fallbackfiles/data09_cos.00135664.physics_IDCosmic.daq.RAW._lb0000._SFO-2._0002.data','/afs/cern.ch/atlas/offline/external/FullChainTest/tier0-vol3/rtt/fallbackfiles/data09_cos.00135664.physics_IDCosmic.daq.RAW._lb0000._SFO-2._0003.data'], - 'MinBias':['/afs/cern.ch/atlas/offline/external/FullChainTest/tier0-vol3/rtt/fallbackfiles/data09_900GeV.00141749.physics_MinBias.merge.RAW._lb0022._0001.1','/afs/cern.ch/atlas/offline/external/FullChainTest/tier0-vol3/rtt/fallbackfiles/data09_900GeV.00141749.physics_MinBias.merge.RAW._lb0023._0001.1','/afs/cern.ch/atlas/offline/external/FullChainTest/tier0-vol3/rtt/fallbackfiles/data09_900GeV.00141749.physics_MinBias.merge.RAW._lb0024._0001.1'], - } -nfilestct = 10 # Maximum number of RAW data files stored on proj-sit for a given runNumber and stream -tct_stage_host = 'castoratlast3' -tct_stage_svcclass = 'atlascerngroupdisk' -retcode = 0 # Exit code returned by script (if non-zero) and grepped by RTT - -def checkTCTCastor(runnumber,trigStream,castordir,nfiles,lb): - print ('tct_recotrf.py : checkTCTCastor : 
starting...') - test = False - listlb = [] - if lb != "": - if ',' in lb: - listlb = [int(lb.split(',')[0]),int(lb.split(',')[1])] - else: - listlb = [int(lb)] - pass - pass - if len(listlb): - print ('tct_recotrf.py : checkTCTCastor : check only files with lumiblocks within %s' % (listlb)) - pass - if 'castor' in castortct and 'tzero' in castortct: - fstrlist = os.popen('nsls -l %s/%s/physics_%s/' % (castortct,runnumber,trigStream)) - elif 'castor' in castortct: - fstrlist = os.popen('xrd castoratlas ls %s/%s/physics_%s/' % (castortct,runnumber,trigStream)) - elif 'eos' in castortct: - fstrlist = os.popen('xrd eosatlas ls %s/%s/physics_%s/' % (castortct,runnumber,trigStream)) - nGoodFiles = 0 - strlist = fstrlist.readlines() - for istr,stream in enumerate(strlist): - if strlist[istr].strip('\n') == "": continue - strlist[istr] = strlist[istr].strip('\n').split()[len(strlist[istr].split())-1] - if not trigStream in strlist[istr]: continue - if len(listlb) == 1 and not ('_lb%0.4d' % listlb[0]) in strlist[istr]: continue - if len(listlb) == 2: - mylb = int(strlist[istr].split('_lb')[1].split('.')[0]) - if mylb < listlb[0] or mylb > listlb[1]: continue - pass - nGoodFiles += 1 - print ('tct_recotrf.py : checkTCTCastor : %s is good file, nGoodFiles = %d' % (strlist[istr],nGoodFiles)) - pass - if nGoodFiles >= nfiles: test = True - if nGoodFiles < nfiles and len(strlist) < nfilestct - nfiles: test = updateTCTCastor(runnumber,trigStream,castordir,nfiles,listlb) - print ('tct_recotrf.py : checkTCTCastor : test = %d ...end' % (test)) - return test - -def updateTCTCastor(runnumber,trigStream,castordir,nfiles,listlb): - test = True - print ('tct_recotrf.py : updateTCTCastor : starting...') - init_stage_host = os.environ['STAGE_HOST'] - init_stage_svcclass = os.environ['STAGE_SVCCLASS'] - if init_stage_svcclass == 't0atlas' and init_stage_host != 'castoratlas': - init_stage_host = 'castoratlas' - os.environ['STAGE_HOST'] = tct_stage_host - os.environ['STAGE_SVCCLASS'] = tct_stage_svcclass - #os.system('rfmkdir %s/%s' % (castortct,runnumber)) - #os.system('rfchmod 777 %s/%s' % (castortct,runnumber)) - #os.system('rfmkdir %s/%s/physics_%s' % (castortct,runnumber,trigStream)) - #os.system('rfchmod 777 %s/%s/physics_%s' % (castortct,runnumber,trigStream)) - if 'castor' in castortct: - os.system('xrd castoratlas mkdir %s/%s' % (castortct,runnumber)) - os.system('xrd castoratlas chmod %s/%s 7 7 7' % (castortct,runnumber)) - os.system('xrd castoratlas mkdir %s/%s/physics_%s' % (castortct,runnumber,trigStream)) - os.system('xrd castoratlas chmod %s/%s/physics_%s 7 7 7' % (castortct,runnumber,trigStream)) - elif 'eos' in castortct: - os.system('xrd eosatlas mkdir %s/%s' % (castortct,runnumber)) - os.system('xrd eosatlas chmod %s/%s 7 7 7' % (castortct,runnumber)) - os.system('xrd eosatlas mkdir %s/%s/physics_%s' % (castortct,runnumber,trigStream)) - os.system('xrd eosatlas chmod %s/%s/physics_%s 7 7 7' % (castortct,runnumber,trigStream)) - os.environ['STAGE_HOST'] = init_stage_host - os.environ['STAGE_SVCCLASS'] = init_stage_svcclass - if 'castor' in castordir and 'tzero' in castordir: - ffilelist = os.popen('nsls -l %s' % (castordir)) - elif 'castor' in castordir: - ffilelist = os.popen('xrd castoratlas ls %s' % (castordir)) - elif 'eos' in castordir: - #ffilelist = os.popen('xrd eosatlas ls %s' % (castordir)) - ffilelist = os.popen('eos ls %s' % (castordir)) - nGoodFiles = 0 - print "tct_recotrf.py : updateTCTCastor : look for files in:", castordir - for ifile,filestr in enumerate(ffilelist): - filestr = 
filestr.strip('\n') - if filestr == "": continue - filename = os.path.basename(filestr.split()[len(filestr.split())-1]) - if not trigStream in filename: continue - if len(listlb) == 1 and not ('_lb%0.4d' % listlb[0]) in filename: continue - if len(listlb) == 2: - mylb = int(filename.split('_lb')[1].split('.')[0]) - if mylb < listlb[0] or mylb > listlb[1]: continue - pass - os.environ['STAGE_HOST'] = init_stage_host - os.environ['STAGE_SVCCLASS'] = init_stage_svcclass - os.system('echo STAGE_HOST=$STAGE_HOST STAGE_SVCCLASS=$STAGE_SVCCLASS') - print ('tct_recotrf.py : updateTCTCastor : copying %s/%s to %s' % (castordir,filename,os.getcwd())) - if not config.dryrun_: - if 'castor' in castordir and 'tzero' in castordir: - os.system('time rfcp %s/%s .' % (castordir,filename)) - elif 'castor' in castordir: - os.system('time xrdcp root://castoratlas/%s/%s .' % (castordir,filename)) - elif 'eos' in castordir: - os.system('time xrdcp root://eosatlas.cern.ch/%s/%s .' % (castordir,filename)) - os.environ['STAGE_HOST'] = tct_stage_host - os.environ['STAGE_SVCCLASS'] = tct_stage_svcclass - os.system('echo STAGE_HOST=$STAGE_HOST STAGE_SVCCLASS=$STAGE_SVCCLASS') - print ('tct_recotrf.py : updateTCTCastor : copying %s to %s/%s/physics_%s' % (filename,castortct,runnumber,trigStream)) - if not config.dryrun_: - if 'castor' in castortct and 'tzero' in castortct: - os.system('time rfcp %s %s/%s/physics_%s' % (filename,castortct,runnumber,trigStream)) - elif 'castor' in castortct: - os.system('time xrdcp %s root://castoratlas/%s/%s/physics_%s/' % (filename,castortct,runnumber,trigStream)) - elif 'eos' in castortct: - os.system('time xrdcp %s root://eosatlas.cern.ch/%s/%s/physics_%s/' % (filename,castortct,runnumber,trigStream)) - if not config.dryrun_: os.system('rm -f %s' % (filename)) - nGoodFiles += 1 - if nGoodFiles == nfiles: break # take only the first 10 files - pass - os.environ['STAGE_HOST'] = init_stage_host - os.environ['STAGE_SVCCLASS'] = init_stage_svcclass - print ('tct_recotrf.py : updateTCTCastor : ...end') - return test - -def copyTCTCastor(jobnum,runnumber,trigStream,nfiles,lb,castordir,usecastortct,readRDO=False): - init_stage_host = os.environ['STAGE_HOST'] - init_stage_svcclass = os.environ['STAGE_SVCCLASS'] - if init_stage_svcclass == 't0atlas' and init_stage_host != 'castoratlas': - init_stage_host = 'castoratlas' - listlb = [] - if lb != "": - if ',' in lb: - listlb = [int(lb.split(',')[0]),int(lb.split(',')[1])] - else: - listlb = [int(lb)] - if usecastortct or (castortct in castordir): - os.environ['STAGE_HOST'] = tct_stage_host - os.environ['STAGE_SVCCLASS'] = tct_stage_svcclass - if usecastortct: - mycastordir = '%s/%s/physics_%s' % (castortct,runnumber,trigStream) - else: - mycastordir = castordir - print ('tct_recotrf.py : copyTCTCastor : STAGE_HOST=%s' % (os.environ['STAGE_HOST'])) - print ('tct_recotrf.py : copyTCTCastor : STAGE_SVCCLASS=%s' % (os.environ['STAGE_SVCCLASS'])) - print ('tct_recotrf.py : copyTCTCastor : look for files in %s' % (mycastordir)) - if 'castor' in mycastordir and 'tzero' in mycastordir: - ffilelist = os.popen('nsls -l %s' % (mycastordir)) - elif 'castor' in mycastordir: - ffilelist = os.popen('xrd castoratlas ls %s' % (mycastordir)) - elif 'eos' in mycastordir: - ffilelist = os.popen('xrd eosatlas ls %s' % (mycastordir)) - filelist = ffilelist.readlines() - if not len(filelist): - print ('tct_recotrf.py : copyTCTCastor : no files found in %s' % (mycastordir)) - print ('tct_recotrf.py : copyTCTCastor : => run on fallbackfiles') - if trigStream in 
fallbackfiles: - myfallbackfiles = fallbackfiles[trigStream] - else: - myfallbackfiles = fallbackfiles['MinBias'] - pass - for idat in range(nfiles): - ifile = idat%(len(myfallbackfiles)) - if not config.dryrun_: os.system('ln -s %s %d.data' % (myfallbackfiles[ifile],idat)) - pass - print "WARNING: Some files copied from castor were corrupted !" - os.environ['STAGE_HOST'] = init_stage_host - os.environ['STAGE_SVCCLASS'] = init_stage_svcclass - return - firstfile = jobnum*nfiles - if (jobnum+1)*nfiles > len(filelist): firstfile = (jobnum%(len(filelist)/nfiles))*nfiles - idat = 0 - nGoodFiles = 0 - for ifile,filestr in enumerate(filelist): - filestr = filestr.strip('\n') - if filestr == "": continue - filename = os.path.basename(filestr.split()[len(filestr.split())-1]) - fullfilename = filename - if not mycastordir in fullfilename: fullfilename = mycastordir + '/' + filename - if len(listlb) == 1 and not ('_lb%0.4d' % listlb[0]) in filename: - continue - elif len(listlb) == 2: - mylb = int(filename.split('_lb')[1].split('.')[0]) - if mylb < listlb[0] or mylb > listlb[1]: continue - elif ifile < firstfile: - continue - if readRDO: - print ('tct_recotrf.py : copyTCTCastor : copying %s to RDO_%d.pool.root' % (fullfilename,idat)) - else: - print ('tct_recotrf.py : copyTCTCastor : copying %s to %d.data' % (fullfilename,idat)) - if not config.dryrun_: - if 'castor' in mycastordir and 'tzero' in mycastordir: - os.system('time rfcp %s .' % (fullfilename)) - elif 'castor' in mycastordir: - os.system('time xrdcp root://castoratlas/%s .' % (fullfilename)) - elif 'eos' in mycastordir: - os.system('time xrdcp root://eosatlas.cern.ch/%s .' % (fullfilename)) - if not config.dryrun_: - if readRDO: - os.system('ln -s %s RDO_%d.pool.root' % (filename,idat)) - else: - os.system('ln -s %s %d.data' % (filename,idat)) - idat += 1 - nGoodFiles += 1 - if nGoodFiles == nfiles: break - pass - os.environ['STAGE_HOST'] = init_stage_host - os.environ['STAGE_SVCCLASS'] = init_stage_svcclass - print "All files copied from castor are ok !" - return - -def parseCmdLine(args): - """ Parse input command line to optdict. 
- To get the whole list of options type : tct_recotrf.py -h""" - from optparse import OptionParser - parser = OptionParser() - # The following options are specific to tct_recotrf.py command - parser.add_option("-d","--dry-run", dest="dryrun_", help="Dry running", - action='store_true', default=False) - parser.add_option("-j","--jobnum", dest="jobnum_", help="Job number", type='int', default=0) - parser.add_option("-r","--runnum", dest="runnum_", help="Run number", default='None') - parser.add_option("--lumiblock", dest="lb_", help="Lumi block(s)", default='') - parser.add_option("-c","--castordir", dest="castordir_", help="Castor path to input data", - default="/castor/cern.ch/grid/atlas/tzero/prod1/perm/data09_900GeV/physics_MinBias/0141749/data09_900GeV.00141749.physics_MinBias.merge.RAW") - parser.add_option("--latestrun", dest="uselatestrun_", help="Do we use latest run available ?", - action='store_true', default=False) - parser.add_option("--nfiles", dest="nfiles_", help="Number of input files", - type='int', default=3) - # The following options are same as Reco_trf.py command - parser.add_option("--maxEvents", dest="maxEvents", help="Maximal number of events", - type='int', default=500) - parser.add_option("--trigStream", dest="trigStream_", help="Trigger stream",default='') - parser.add_option("--AMI", dest="AMI", help="AMI tag", default='') - parser.add_option("--AMITag", dest="AMITag", help="AMI tag", default='') - parser.add_option("--autoConfiguration", dest="autoConfiguration", help="autoConfiguration", default='') - parser.add_option("--geometryVersion", dest="geometryVersion", help="geometryVersion", default='') - parser.add_option("--conditionsTag", dest="conditionsTag", help="conditionsTag", default='') - parser.add_option("--ignoreErrors", dest="ignoreErrors", help="ignoreErrors", action='store_true', default='False') - parser.add_option("--ignorePatterns", dest="ignorePatterns", help="ignorePatterns", default='') - #parser.add_option("--ignoreerrors", dest="ignoreerrors", help="ignoreErrors", action='store_true', default='NONE') - #parser.add_option("--beamType", dest="beamType", help="Beam type", default='') - parser.add_option("--preExec", dest="preExec", help="Pre-execute options (overwrite)", default='') - parser.add_option("--preExec_r2e", dest="preExec_r2e", help="Pre-execute in RAWtoESD options (overwrite)", default='') - parser.add_option("--append_preExec", dest="append_preExec", help="Pre-execute options (append)", default='') - parser.add_option("--preInclude", dest="preInclude", help="Pre-include options", default='') - parser.add_option("--postInclude", dest="postInclude", help="Post-include options", default='') - parser.add_option("--postExec", dest="postExec", help="postExec options", default='') - parser.add_option("--postExec_e2a", dest="postExec_e2a", help="postExec_e2a options", default='') - parser.add_option("--triggerConfig", dest="triggerConfig", help="Trigger configuration", default='') - parser.add_option("--RunNumber", dest="RunNumber", help="Run number", default='') - parser.add_option("--steering", dest="steering", help="transform steering", default='') - parser.add_option("--athenaopts", dest="athenaopts", help="athena options", default='') - parser.add_option("--HI", dest="doHI_", help="Run with Heavy ions settings", - action='store_true', default=False) - parser.add_option("--HIP", dest="doHIP_", help="Run with proton-lead settings", - action='store_true', default=False) - parser.add_option("--readRDO", dest="readRDO_", help="Read input 
RDO files", - action='store_true', default=False) - parser.add_option("--dropHIST", dest="dropHIST_", help="Drop DQ histogram from outputs", - action='store_true', default=False) - parser.add_option("--dropDESDs", dest="dropDESDs_", help="Drop DESD files from outputs", - action='store_true', default=False) - parser.add_option("--dropDRAWs", dest="dropDRAWs_", help="Drop DRAW files from outputs", - action='store_true', default=False) - parser.add_option("--dropNTUPs", dest="dropNTUPs_", help="Drop NTUP files from outputs", - action='store_true', default=False) - parser.add_option("--dropDAODs", dest="dropDAODs_", help="Drop DAOD files from outputs", - action='store_true', default=False) - #parser.add_option("--uploadtoami", dest="uploadtoami", help="Upload performance data to AMI", type='float', default=0.0) - (config, args) = parser.parse_args(args) - print 'config',config - return config - -def generateRecoTrfCmd(config): - """ Generate from command line options the list of arguments - used by Reco_trf.py """ - recocmd = "Reco_tf.py " - nfiles = config.nfiles_ - nfilescheck = 0 - for ifile in range(nfiles): - if not config.readRDO_ and not os.path.isfile('%d.data' % ifile): break - if config.readRDO_ and not os.path.isfile('RDO_%d.pool.root' % ifile): break - nfilescheck += 1 - if nfilescheck < nfiles: - print 'tct_recotrf.py : generateRecoTrfCmd : found only',nfilescheck,'data files => set it as number of files!' - nfiles = nfilescheck - if config.readRDO_: - config.inputRDOFile = ("RDO_%d.pool.root,"*nfiles) % tuple(range(nfiles)) - config.inputRDOFile = config.inputRDOFile.strip(',') - else: - config.inputBSFile = ("%d.data,"*nfiles) % tuple(range(nfiles)) - config.inputBSFile = config.inputBSFile.strip(',') - config.outputESDFile = "myESD_%s_%d.pool.root" % (config.trigStream_,config.jobnum_) - if not config.dropHIST_: - config.outputHISTFile = "myMergedMonitoring_%s_%d.root" % (config.trigStream_,config.jobnum_) - ### Updated 27.11.2016 - desdlist = ['DESDM_IDALIGN','DESDM_SGLEL','DESDM_SLTTMU', - 'DESDM_EGAMMA','DESDM_MCP','DESDM_CALJET', - 'DESDM_PHOJET','DESDM_TILEMU','DESDM_EXOTHIP'] - drawlist = ['DRAW_EGZ','DRAW_TAUMUH','DRAW_ZMUMU','DRAW_RPVLL', 'DRAW_TOPSLMU'] - ### DAOD still missing some outputs due to ATLASRECTS-3427 - #daodlist = ['DAOD_IDTIDE','DAOD_IDTRKVALID','DAOD_SCTVALID','DAOD_IDNCB'] - daodlist = ['DAOD_IDTIDE','DAOD_IDTRKVALID'] - ntuplist = [] # Obsolete - if config.doHI_: - desdlist = [] - #desdlist = ['DESD_HIRARE'] - drawlist = [] - #ntuplist = ['NTUP_HI','NTUP_TRKVALID'] - ntuplist = [] - config.outputAODFile = "myAOD_%s_%d.pool.root" % (config.trigStream_,config.jobnum_) - elif config.doHIP_: - desdlist = [] - drawlist = [] - #ntuplist = ['NTUP_HI','NTUP_MINBIAS','NTUP_TRKVALID'] - ntuplist = [] - config.outputAODFile = "myAOD_%s_%d.pool.root" % (config.trigStream_,config.jobnum_) - else: - config.outputAODFile = "myAOD_%s_%d.AOD.pool.root" % (config.trigStream_,config.jobnum_) - if not config.dropDAODs_: - for dtype in daodlist: - setattr(config,'output%sFile' % dtype, - "%s_%s_%d.AOD.pool.root" % (dtype.split('_')[1],config.trigStream_,config.jobnum_)) - #if config.beamType == 'cosmics': - # desdlist = ['DESD_PIXELCOMM','DESD_IDCOMM','DESD_CALOCOMM','DESD_MUONCOMM','DESD_TILECOMM'] - # pass - if not config.dropDESDs_: - for dtype in desdlist: - setattr(config,'output%sFile' % dtype, - "%s_%s_%d.pool.root" % (dtype.split('_')[1],config.trigStream_,config.jobnum_)) - if not config.dropDRAWs_: - for dtype in drawlist: - setattr(config,'output%sFile' % dtype, 
- "%s_%s_%d.data" % (dtype.split('_')[1],config.trigStream_,config.jobnum_)) - if not config.dropNTUPs_: - #config.outputNTUP_TRIGFile = "TRIG_%s_%d.root" % (config.trigStream_,config.jobnum_) - if not config.doHI_ and not config.doHIP_ : - config.outputNTUP_SCTFile = "SCT_%s_%d.root" % (config.trigStream_,config.jobnum_) - config.outputNTUP_BKGDFile = "BKGD_%s_%d.root" % (config.trigStream_,config.jobnum_) - for ntype in ntuplist: - setattr(config,'output%sFile' % ntype, - "%s_%s_%d.NTUP.root" % (ntype.split('_')[1],config.trigStream_,config.jobnum_)) - if config.ignoreErrors=="ALL": - config.ignoreErrors = "True" - for arg in vars(config): - if arg.endswith('_') or getattr(config,arg) == '': continue - recocmd += " --"+arg+"="+str(getattr(config,arg)) - return recocmd - -# -# Main job -# -print "tct_recotrf.py: starting",datetime.datetime.now() -workdir = rttdir = os.getcwd() -if "WORKDIR" in os.environ: workdir = os.environ["WORKDIR"] -print "tct_recotrf.py: working directory is",workdir -os.chdir(workdir) -config = parseCmdLine(sys.argv[1:]) -isr155xy = '15.5.X.Y' in os.environ['AtlasArea'] -# To fix bug #85744: TCT: Fatal in <TROOT::InitSystem>: HOME directory not set -os.environ["ROOTENV_NO_HOME"] = '1' - -# -# Copy input files -# -print "tct_recotrf.py: generate copy link script" -inputdir = config.castordir_ -if not config.uselatestrun_: inputdir += '/'+config.runnum_ -useTCTCastor = False -if not config.uselatestrun_: - if not castortct in config.castordir_ and not 'eos' in config.castordir_: - useTCTCastor = checkTCTCastor(config.runnum_,config.trigStream_,config.castordir_,config.nfiles_,config.lb_) - print "tct_recotrf.py: start copying files...",datetime.datetime.now() - copyTCTCastor(config.jobnum_,config.runnum_,config.trigStream_,config.nfiles_,config.lb_,config.castordir_,useTCTCastor,config.readRDO_) - print "tct_recotrf.py: ...files copying done ",datetime.datetime.now() -else: - copycmd = 'tct_generate_copyscript.py %d physics_%s %s %s %s %s' % (config.jobnum_,config.trigStream_,config.nfiles_,inputdir,str(config.uselatestrun_),config.lb_) - os.system(copycmd) - print "tct_recotrf.py: start copying files...",datetime.datetime.now() - if not config.dryrun_: os.system("./copylinkscript.sh") - print "tct_recotrf.py: ...files copying done ",datetime.datetime.now() - pass -if not config.readRDO_: - print "tct_recotrf.py: set AtlCoolT0Setup.py" - os.system('/afs/cern.ch/user/a/atlcond/utils/AtlCoolT0Setup.py') - pass - -# -# List input files -# -print "tct_recotrf.py: print list of input raw data files :" -os.system('ls -l -h *.data') -flistfiles = os.popen('ls -l *.data') -listfiles = flistfiles.readlines() -for il,line in enumerate(listfiles): - listfiles[il] = line.strip('\n').split()[len(line.split())-1] - pass - -# -# Run job transform -# - -# Environment needed for uploadtoami -os.environ['TZAMIPW'] = '/afs/cern.ch/atlas/project/RTT/prod/RTTUserFiles/vukotic/conn.txt' - -recocmd = generateRecoTrfCmd(config) -print "\ntct_recotrf.py: exact Reco_Trf.py command is \"",recocmd,"\"" -print "\ntct_recotrf.py: starting Reco_Trf.py...",datetime.datetime.now() -startTime = time.time() -if not config.dryrun_: - os.environ['DEFAULT_AF_CACHE_FNAME'] = '' - retcode = subprocess.call(recocmd, shell=True) - if retcode: raise SystemExit(retcode) - print "tct_recotrf.py: return code is set to",retcode - pass -endTime = time.time() -print "tct_recotrf.py: ...ending Reco_Trf.py. 
",datetime.datetime.now() -print "tct_recotrf.py: DeltaTime = ",str(endTime-startTime),"\n" - -# -# Delete input files -# -print "tct_recotrf.py: delete input raw data files :" -for il,line in enumerate(listfiles): - print "tct_recotrf.py: remove",listfiles[il] - os.system('rm -f %s' % listfiles[il]) - pass - -# -# Copy files back to rttdir if different from workdir -# -os.chdir(rttdir) -if rttdir != workdir: - print "\ntct_recotrf.py: start copying output files to rttdir...",rttdir,datetime.datetime.now() - print "tct_recotrf.py: rttdir =",rttdir - for filename in os.listdir(workdir): - test = filename.endswith('.data') or filename in listfiles - if test: continue - if not os.path.isfile(workdir+'/'+filename): - print "tct_recotrf.py:",(workdir+'/'+filename),"is not a file, skip it" - continue - print "tct_recotrf.py: copying",filename - if not config.dryrun_: shutil.copyfile(workdir+'/'+filename,rttdir+'/'+filename) - pass - print "tct_recotrf.py: ...output files copying to rttdir done ",datetime.datetime.now() - pass -print "\ntct_recotrf.py: ending",datetime.datetime.now() diff --git a/Tools/Tier0ChainTests/scripts/tct_recotrf.sh b/Tools/Tier0ChainTests/scripts/tct_recotrf.sh deleted file mode 100755 index eb1b3671c5c663a1d8c95ac548dbd0526c661c13..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_recotrf.sh +++ /dev/null @@ -1,129 +0,0 @@ -#!/bin/sh - -## Check arguments -if [ $# -lt 1 ] ; then - echo "Usage: tct_recotrf.sh <jobid> [<trigstream>] [<nfiles>] [<nevents>] [<castorrundir>] [<amitag>] [<findlatestcastordir>] [<logfile>]" - exit 1 -fi - -## Intercept dryrun option if given -DRYRUN=0 -if [ "$1" = "-d" ] ; then - DRYRUN=1 - shift -fi - -## set configuration parameters -JOB=$1 -TRIGSTR=IDCosmic -NFILES=3 -NEVENTS=500 -CASTOR=/castor/cern.ch/grid/atlas/DAQ/2008/91890 -AMITAG=f128 -CASTORSUBDIR=False -LOGFILE= -if [ $# -gt 1 ] ; then - TRIGSTR=$2 -fi -if [ $# -gt 2 ] ; then - if [ $3 -lt 1 ] ; then - echo "Need at least one input collection. Exit." 
- echo "Usage: tct_recotrf.sh <jobid> [<trigstream>] [<nfiles>] [<nevents>] [<castorrundir>] [<amitag>] [<findlatestcastordir>] [<beamtype>] [<logfile>]" - exit 1 - fi - NFILES=$3 -fi -if [ $# -gt 3 ] ; then - NEVENTS=$4 -fi -if [ $# -gt 4 ] ; then - CASTOR=$5 -fi -if [ $# -gt 5 ] ; then - AMITAG=$6 -fi -if [ $# -gt 6 ] ; then - CASTORSUBDIR=$7 -fi -if [ $# -gt 7 ] ; then - BEAMTYPE=$8 -fi -if [ $# -gt 8 ] ; then - LOGFILE=$9 -fi - -## generate and run dynamic script for copying (3) files from castor (named 0.data,1.data,2.data) -date -tct_generate_copyscript.py $JOB physics_$TRIGSTR $NFILES $CASTOR $CASTORSUBDIR -if [ $DRYRUN -ne 1 ]; then - ./copylinkscript.sh -fi -date - -INFILE=0.data -count=1 -while [ $count -lt ${NFILES} ] ; -do - INFILE=${INFILE},$count.data - count=`expr $count + 1` -done - -## implement additional recotrf arguments, obtained from ami -#if [ ${PREEXEC} ]; then -# PREEXEC="preExec='${PREEXEC};rec.doPerfMon=True;rec.doDetailedPerfMon=True'" -#else -# PREEXEC="preExec='rec.doPerfMon=True;rec.doDetailedPerfMon=True'" -#fi - -MAXEVENTS="maxEvents=${NEVENTS}" - -## now run the transform -RUNCMD="Reco_trf.py AMI=${AMITAG} inputBSFile=${INFILE} ${MAXEVENTS} trigStream=${TRIGSTR} outputESDFile=myESD_${TRIGSTR}_${JOB}.pool.root outputAODFile=job_${TRIGSTR}_${JOB}.AOD.pool.root outputMuonCalibNtup=muonCalib_${TRIGSTR}_${JOB}.root outputPixelCalibNtup=trackVal_${TRIGSTR}_${JOB}.root HIST=myMergedMonitoring_${TRIGSTR}_${JOB}.root outputTAGComm=myTAGCOMM_${TRIGSTR}_${JOB}.root outputDESD_PIXELCOMMFile=PIXELCOMM_${TRIGSTR}_${JOB}.pool.root outputDESD_IDCOMMFile=IDCOMM_${TRIGSTR}_${JOB}.pool.root outputDESD_CALOCOMMFile=CALOCOMM_${TRIGSTR}_${JOB}.pool.root outputDESD_MUONCOMMFile=MUONCOMM_${TRIGSTR}_${JOB}.pool.root outputDESD_TILECOMMFile=TILECOMM_${TRIGSTR}_${JOB}.pool.root outputNTUP_TRIG=TRIG_${TRIGSTR}_${JOB}.root" -if [ ${POSTINC} ]; then - echo adding postInclude ${POSTINC} - RUNCMD="Reco_trf.py AMI=${AMITAG} inputBSFile=${INFILE} ${MAXEVENTS} trigStream=${TRIGSTR} postInclude=${POSTINC} outputESDFile=myESD_${TRIGSTR}_${JOB}.pool.root outputAODFile=job_${TRIGSTR}_${JOB}.AOD.pool.root outputMuonCalibNtup=muonCalib_${TRIGSTR}_${JOB}.root outputPixelCalibNtup=trackVal_${TRIGSTR}_${JOB}.root HIST=myMergedMonitoring_${TRIGSTR}_${JOB}.root outputTAGComm=myTAGCOMM_${TRIGSTR}_${JOB}.root outputDESD_PIXELCOMMFile=PIXELCOMM_${TRIGSTR}_${JOB}.pool.root outputDESD_IDCOMMFile=IDCOMM_${TRIGSTR}_${JOB}.pool.root outputDESD_CALOCOMMFile=CALOCOMM_${TRIGSTR}_${JOB}.pool.root outputDESD_MUONCOMMFile=MUONCOMM_${TRIGSTR}_${JOB}.pool.root outputDESD_TILECOMMFile=TILECOMM_${TRIGSTR}_${JOB}.pool.root outputNTUP_TRIG=TRIG_${TRIGSTR}_${JOB}.root" -fi - -if [ ${PREEXEC} ]; then - echo Adding preExec ${PREEXEC} - RUNCMD="${RUNCMD} append_preExec='${PREEXEC}'" -fi - -if [ ${BEAMTYPE} ]; then - echo Changing beam type ${BEAMTYPE} - RUNCMD="Reco_trf.py AMI=${AMITAG} inputBSFile=${INFILE} ${MAXEVENTS} trigStream=${TRIGSTR} outputESDFile=myESD_${TRIGSTR}_${JOB}.pool.root outputAODFile=job_${TRIGSTR}_${JOB}.AOD.pool.root outputMuonCalibNtup=muonCalib_${TRIGSTR}_${JOB}.root outputPixelCalibNtup=trackVal_${TRIGSTR}_${JOB}.root HIST=myMergedMonitoring_${TRIGSTR}_${JOB}.root outputTAGComm=myTAGCOMM_${TRIGSTR}_${JOB}.root outputDESD_PHOJETFile=PHOJET.root outputDESD_SGLMUFile=SGLMU.root outputDESDM_TRACKFile=TRACK.root outputDESDM_MUONFile=MUON.root outputDESD_METFile=MET.root outputDESD_MBIASFile=MBIAS.root outputDESDM_EGAMMAFile=EGAMMA.root outputDESDM_CALJETFile=CALJET.root outputDESD_SGLELFile=SGLEL.root 
outputNTUP_TRIG=TRIG_${TRIGSTR}_${JOB}.root beamType=${BEAMTYPE}" - #RUNCMD="${RUNCMD} beamType=${BEAMTYPE}" -fi - -echo -echo ">> Now running command:" -echo ">> ====================" -echo "$RUNCMD" -echo - -if [ $DRYRUN -ne 1 ]; then - if [ ! "$LOGFILE" = "" ] ; then - $RUNCMD 2>&1 | tee $LOGFILE - else - $RUNCMD - fi -fi - -echo -echo ">> Finished command:" -echo ">> =================" -echo "$RUNCMD" -echo - -## perfmon -if [ ! -z "`ls *.pmon.gz`" ]; then - for pmonfile in `ls *.pmon.gz`; do - echo "Now post-processing perfmon file : ${pmonfile}" - pmoncmd="perfmon.py ${pmonfile} -s True" - $pmoncmd - done -fi - -## done -date - diff --git a/Tools/Tier0ChainTests/scripts/tct_runAll.sh b/Tools/Tier0ChainTests/scripts/tct_runAll.sh deleted file mode 100755 index 42c655be0d057efc03fe6e148bdcd03889dc0757..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_runAll.sh +++ /dev/null @@ -1,210 +0,0 @@ -#!/bin/sh - -## Check arguments -if [ $# -lt 1 ] ; then - echo "Usage: tct_runAll.sh <numrecojobs> [<trigstream>] [<nfilesperjob>] [<neventsperjob>] [<castorbasedir>]" - exit 1 -fi - -## Intercept dryrun option if given -DRYRUN=0 -if [ "$1" = "-d" ] ; then - DRYRUN=1 - shift -fi - -## set configuration parameters -NUMJOBS=$1 -TRIGSTR=IDCosmic -if [ $# -gt 1 ] ; then - TRIGSTR=$2 -fi -NFILES=1 -if [ $# -gt 2 ] ; then - if [ $3 -lt 1 ] ; then - echo "Need at least one input collection per job. Exit." - echo "Usage: tct_runAll.sh <numrecojobs> [<trigstream>] [<nfiles>] [<nevents>] [<castorrundir>]" - exit 1 - fi - NFILES=$3 -fi -NEVENTS=-1 -if [ $# -gt 3 ] ; then - NEVENTS=$4 -fi -CASTOR=/castor/cern.ch/grid/atlas/DAQ/2008/91890 -if [ $# -gt 4 ] ; then - CASTOR=$5 -fi - -## print configuration -echo "" -echo "Test chain configuration: " -echo "-------------------------" -echo "Number of reconstruction jobs : ${NUMJOBS}" -echo "Trigger stream : ${TRIGSTR}" -echo "Collection / reco job : ${NFILES}" -echo "Events / reco job : ${NEVENTS}" -echo "Castor base directory : ${CASTOR}" -echo "" - -## run all tct commands in a sequential chain - -## get ami tag -RUNCMD="tct_getAmiTag.py latest ami_recotrf.cmdargs" -echo -echo ">> Now running command:" -echo ">> ====================" -echo ${RUNCMD} -echo -if [ $DRYRUN -ne 1 ]; then - $RUNCMD -fi -echo -echo ">> Finished command:" -echo ">> ====================" -echo ${RUNCMD} -echo - -## reco jobs -idx=0 -while [ $idx -lt ${NUMJOBS} ] ; -do - RUNCMD="tct_recotrf_Tier0.sh $idx ${TRIGSTR} ${NFILES} ${NEVENTS} ${CASTOR} ami_recotrf.cmdargs" - echo - echo ">> Now running command:" - echo ">> ====================" - echo ${RUNCMD} - echo - if [ $DRYRUN -ne 1 ]; then - $RUNCMD - fi - echo - echo ">> Finished command:" - echo ">> ====================" - echo ${RUNCMD} - echo - idx=`expr $idx + 1` -done - -TRFARGS="preInclude=RecExCond/MinimalCommissioningSetup.py --ignoreunknown" - -## merge aods -INFILE=job_${TRIGSTR}_0.AOD.pool.root -count=1 -while [ $count -lt ${NUMJOBS} ] ; -do - INFILE=${INFILE},job_${TRIGSTR}_$count.AOD.pool.root - count=`expr $count + 1` -done -RUNCMD="MergePool_trf.py maxEvents=-1 inputAODFile=${INFILE} trigStream=$TRIGSTR autoConfiguration=FieldAndGeo,ConditionsTag outputAODFile=MergedAOD.pool.root ${TRFARGS}" -echo -echo ">> Now running command:" -echo ">> ====================" -echo ${RUNCMD} -echo -if [ $DRYRUN -ne 1 ]; then - $RUNCMD -fi -echo -echo ">> Finished command:" -echo ">> ====================" -echo ${RUNCMD} -echo - -## merge esds -INFILE=myESD_${TRIGSTR}_0.pool.root -count=1 -while [ $count -lt 
${NUMJOBS} ] ; -do - INFILE=${INFILE},myESD_${TRIGSTR}_$count.pool.root - count=`expr $count + 1` -done - -RUNCMD="MergePool_trf.py maxEvents=-1 inputESDFile=${INFILE} trigStream=$TRIGSTR autoConfiguration=FieldAndGeo,ConditionsTag outputESDFile=MergedESD.pool.root ${TRFARGS}" -echo -echo ">> Now running command:" -echo ">> ====================" -echo ${RUNCMD} -echo -if [ $DRYRUN -ne 1 ]; then - $RUNCMD -fi -echo -echo ">> Finished command:" -echo ">> ====================" -echo ${RUNCMD} -echo - -## merge monitoring files -INFILE="" -count=0 -while [ $count -lt ${NUMJOBS} ] ; -do - INFILE="${INFILE} myMergedMonitoring_${TRIGSTR}_$count.root" - count=`expr $count + 1` -done - -RUNCMD="tct_mergeMonHistograms.sh myMergedMonitoring.root $INFILE" -echo -echo ">> Now running command:" -echo ">> ====================" -echo ${RUNCMD} -echo -if [ $DRYRUN -ne 1 ]; then - $RUNCMD -fi -echo -echo ">> Finished command:" -echo ">> ====================" -echo ${RUNCMD} -echo - -## make tag file -RUNCMD="AODtoTAG_trf.py maxEvents=-1 trigStream=${TRIGSTR} inputAODFile=MergedAOD.pool.root autoConfiguration=FieldAndGeo,ConditionsTag ${TRFARGS}" -echo -echo ">> Now running command:" -echo ">> ====================" -echo ${RUNCMD} -echo -if [ $DRYRUN -ne 1 ]; then - $RUNCMD -fi -echo -echo ">> Finished command:" -echo ">> ====================" -echo ${RUNCMD} -echo - -## upload tag file -RUNCMD="tct_tagupload.sh myTag.pool.root" -echo -echo ">> Now running command:" -echo ">> ====================" -echo ${RUNCMD} -echo -if [ $DRYRUN -ne 1 ]; then - $RUNCMD -fi -echo -echo ">> Finished command:" -echo ">> ====================" -echo ${RUNCMD} -echo - -## dq webdisplay -RUNCMD="tct_dqwebdisplay.sh myMergedMonitoring.root" -echo -echo ">> Now running command:" -echo ">> ====================" -echo ${RUNCMD} -echo -if [ $DRYRUN -ne 1 ]; then - $RUNCMD -fi -echo -echo ">> Finished command:" -echo ">> ====================" -echo ${RUNCMD} -echo - diff --git a/Tools/Tier0ChainTests/scripts/tct_tagupload.sh b/Tools/Tier0ChainTests/scripts/tct_tagupload.sh deleted file mode 100755 index 0ed4e2bb35dac87579c2a1ae40da36c65290a4e5..0000000000000000000000000000000000000000 --- a/Tools/Tier0ChainTests/scripts/tct_tagupload.sh +++ /dev/null @@ -1,51 +0,0 @@ -#!/bin/sh - -## Check arguments -if [ $# -lt 1 ] ; then - echo "Usage: tct_tagupload.sh <tagfile>" - exit 1 -fi - -## intercept dryrun option if given -DRYRUN=0 -if [ "$1" = "-d" ] ; then - DRYRUN=1 - shift -fi - -## Check existence of template files -if [ ! 
-f $1 ] ; then - echo "ERROR: Cannot find tag file " $1 - exit 1 -fi - -## setup configuration -tagfile=$1 -timestamp=$(date "+%Y%m%d_%Hh%Mm%S") -if [ -f authentication.xml ]; then - rm -f authentication.xml -fi - -## do tag upload - -RUNCMD="tagUpload_trf.py $tagfile atn_test_AtlasPOOL AtlasPOOL,takeFromEnv oracle://int8r_nolb/atlas_tags_loading_test" - -echo -echo ">> Now running command:" -echo ">> ====================" -echo "$RUNCMD" -echo -if [ $DRYRUN -ne 1 ]; then - ln -s /afs/cern.ch/atlas/offline/test/POOLCollectionTools/authentication.xml - CollRemove -src atn_test_AtlasPOOL RelationalCollection -srcconnect oracle://int8r_nolb/atlas_tags_loading_test - $RUNCMD - #$RUNCMD >& tag_upload_$timestamp.log -fi -echo -echo ">> Finished command:" -echo ">> =================" - - -## temporary patch, to be fixed in tagUpload_trf.py -echo "ErrorCode=0 (OK)" - diff --git a/Trigger/TrigMonitoring/TrigEgammaMonitoring/python/TrigEgammaMonitCategory.py b/Trigger/TrigMonitoring/TrigEgammaMonitoring/python/TrigEgammaMonitCategory.py index 986d3cb6dbd20e9995daa1eb71a3b7b8a69f03ea..c8f67cf048f31cd9fbbdc406bb9f6145f56218f2 100644 --- a/Trigger/TrigMonitoring/TrigEgammaMonitoring/python/TrigEgammaMonitCategory.py +++ b/Trigger/TrigMonitoring/TrigEgammaMonitoring/python/TrigEgammaMonitCategory.py @@ -37,7 +37,7 @@ def mongroupsCfg(moniAccess, data_type): monitoring_topo = [] mongroups = { 'monitoring_electron' : monitoring_electron, - 'monitoring_photon' : monitoring_photon, + 'monitoring_photon' : monitoring_photon + ['HLT_g140_loose_L1eEM26M','HLT_g300_etcut_L1eEM26M'], 'monitoring_bootstrap' : monitoring_bootstrap, 'monitoringTP_electron' : monitoringTP_electron + ['HLT_e26_lhtight_L1eEM26M','HLT_e28_lhtight_L1eEM26M','HLT_e30_lhvloose_L1eEM26M','HLT_e60_lhvloose_L1eEM26M'], 'monitoring_tags' : monitoring_tags, diff --git a/Trigger/TrigValidation/TrigAnalysisTest/share/ref_RDOtoRDOTrig_v1Dev_build.ref b/Trigger/TrigValidation/TrigAnalysisTest/share/ref_RDOtoRDOTrig_v1Dev_build.ref index 878163521dd00e64eb0424e055be74df119e889f..a1665d51f833043744f9120d91145df3345b37f3 100644 --- a/Trigger/TrigValidation/TrigAnalysisTest/share/ref_RDOtoRDOTrig_v1Dev_build.ref +++ b/Trigger/TrigValidation/TrigAnalysisTest/share/ref_RDOtoRDOTrig_v1Dev_build.ref @@ -109,15 +109,8 @@ HLT_2e5_lhmedium_j70_j50a_j0_DJMASS900j50_L1MJJ-500-NFF: 2: 1 HLT_2e5_lhmedium_j70_j50a_j0_DJMASS900j50_L1jMJJ-500-NFF: eventCount: 0 - stepCounts: - 0: 1 - 1: 1 - 2: 1 stepFeatures: - 0: 3 - 1: 2 - 2: 3 - 3: 1 + 0: 1 HLT_2e5_lhvloose_L1EM3_bBeeM6000_L1All: eventCount: 0 stepCounts: @@ -1156,10 +1149,10 @@ HLT_2j45_0eta290_020jvt_bdl1d70_j0_HT290_j0_DJMASS700j35_pf_ftf_L1HT150-J20s5pET HLT_2j45_0eta290_020jvt_bdl1d70_j0_HT290_j0_DJMASS700j35_pf_ftf_L1HT150-jJ50s5pETA32_jMJJ-400-CF: eventCount: 0 stepCounts: - 0: 4 + 0: 1 1: 1 stepFeatures: - 0: 12 + 0: 3 1: 54 2: 1 HLT_2j45_0eta290_020jvt_bdl1d70_j0_HT300_j0_DJMASS700j35_pf_ftf_L1HT150-J20s5pETA31_MJJ-400-CF: @@ -1174,10 +1167,10 @@ HLT_2j45_0eta290_020jvt_bdl1d70_j0_HT300_j0_DJMASS700j35_pf_ftf_L1HT150-J20s5pET HLT_2j45_0eta290_020jvt_bdl1d70_j0_HT300_j0_DJMASS700j35_pf_ftf_L1HT150-jJ50s5pETA32_jMJJ-400-CF: eventCount: 0 stepCounts: - 0: 4 + 0: 1 1: 1 stepFeatures: - 0: 12 + 0: 3 1: 54 2: 1 HLT_2j45_0eta290_020jvt_bgn160_2j45_pf_ftf_presel2j25XX2j25b85_L14J15p0ETA25: @@ -1278,10 +1271,10 @@ HLT_2j45_0eta290_020jvt_bgn170_j0_HT290_j0_DJMASS700j35_pf_ftf_L1HT150-J20s5pETA HLT_2j45_0eta290_020jvt_bgn170_j0_HT290_j0_DJMASS700j35_pf_ftf_L1HT150-jJ50s5pETA32_jMJJ-400-CF: eventCount: 0 
stepCounts: - 0: 4 + 0: 1 1: 1 stepFeatures: - 0: 12 + 0: 3 1: 54 2: 1 HLT_2j45_0eta290_020jvt_bgn170_j0_HT300_j0_DJMASS700j35_pf_ftf_L1HT150-J20s5pETA31_MJJ-400-CF: @@ -1296,10 +1289,10 @@ HLT_2j45_0eta290_020jvt_bgn170_j0_HT300_j0_DJMASS700j35_pf_ftf_L1HT150-J20s5pETA HLT_2j45_0eta290_020jvt_bgn170_j0_HT300_j0_DJMASS700j35_pf_ftf_L1HT150-jJ50s5pETA32_jMJJ-400-CF: eventCount: 0 stepCounts: - 0: 4 + 0: 1 1: 1 stepFeatures: - 0: 12 + 0: 3 1: 54 2: 1 HLT_2j45_0eta290_020jvt_bgn260_2j45_pf_ftf_presel2j25XX2j25bgtwo85_L14jJ40p0ETA25: @@ -1348,10 +1341,10 @@ HLT_2j45_0eta290_020jvt_bgn260_pf_ftf_xe50_cell_xe85_tcpufit_L12jJ40_jXE110: HLT_2j45_0eta290_020jvt_bgn270_j0_HT290_j0_DJMASS700j35_pf_ftf_L1HT150-jJ50s5pETA32_jMJJ-400-CF: eventCount: 0 stepCounts: - 0: 4 + 0: 1 1: 1 stepFeatures: - 0: 12 + 0: 3 1: 54 2: 1 HLT_2j45_2j45_2timeSig15_L14jJ40: @@ -8179,19 +8172,19 @@ HLT_e5_lhvloose_j70_j50a_j0_DJMASS1000j50_xe50_tcpufit_L1MJJ-500-NFF: HLT_e5_lhvloose_j70_j50a_j0_DJMASS1000j50_xe50_tcpufit_L1jMJJ-500-NFF: eventCount: 0 stepCounts: - 0: 2 - 1: 2 - 2: 2 - 3: 2 - 4: 2 - 5: 2 + 0: 1 + 1: 1 + 2: 1 + 3: 1 + 4: 1 + 5: 1 stepFeatures: - 0: 3 - 1: 3 - 2: 4 - 3: 2 - 4: 2 - 5: 2 + 0: 1 + 1: 1 + 2: 1 + 3: 1 + 4: 1 + 5: 1 HLT_e5_nopid_L1eEM5: eventCount: 20 stepCounts: @@ -8728,21 +8721,15 @@ HLT_g10_loose_L1eEM9: 2: 21 3: 6 HLT_g10_tight_j30a_j25a_pf_ftf_xe35_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 2 - 2: 2 - 3: 1 - 4: 1 - 5: 1 + 0: 1 + 1: 1 + 2: 1 stepFeatures: - 0: 2 - 1: 2 - 2: 2 - 3: 1 - 4: 4 - 5: 21 + 0: 1 + 1: 1 + 2: 1 HLT_g120_loose_L1eEM26M: eventCount: 0 HLT_g120_loose_L1eEM28M: @@ -8764,21 +8751,15 @@ HLT_g12_loose_LArPEBHLT_L1EM10VH: 3: 5 4: 5 HLT_g12_tight_j30a_j25a_pf_ftf_xe35_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 2 - 2: 2 - 3: 1 - 4: 1 - 5: 1 + 0: 1 + 1: 1 + 2: 1 stepFeatures: - 0: 2 - 1: 2 - 2: 2 - 3: 1 - 4: 4 - 5: 21 + 0: 1 + 1: 1 + 2: 1 HLT_g13_loose_g10_loose_EgammaPEBTLA_L113DR25-25M70-eEM12LeEM9L: eventCount: 0 HLT_g13_loose_g10_loose_EgammaPEBTLA_L12DR15-0M30-eEM12LeEM9L: @@ -8871,21 +8852,11 @@ HLT_g15_tight_ringer_L1eEM12L: 2: 3 3: 3 HLT_g16_tight_j30a_j25a_pf_ftf_xe35_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 4 - 5: 21 + 0: 1 HLT_g20_loose_L1eEM18L: eventCount: 4 stepCounts: @@ -8997,47 +8968,21 @@ HLT_g20_medium_xe35_cell_xe40_pfopufit_L1XE35: HLT_g20_tight_2j35a_j0_DJMASS300j35_pf_ftf_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 2 + 0: 1 HLT_g20_tight_2j35a_j0_DJMASS500j35_pf_ftf_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 2 + 0: 1 HLT_g20_tight_2j35a_j0_pf_ftf_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 2 - 5: 41 + 0: 1 HLT_g20_tight_L1eEM18M: eventCount: 3 stepCounts: @@ -9053,53 +8998,21 @@ HLT_g20_tight_L1eEM18M: HLT_g20_tight_icaloloose_2j35a_j0_DJMASS300j35_pf_ftf_xe40_cell_xe50_pfopufit_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 4 - 6: 1 + 0: 1 HLT_g20_tight_icaloloose_2j35a_j0_DJMASS500j35_pf_ftf_L1eEM22M_jMJJ-300: eventCount: 0 
stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 2 + 0: 1 HLT_g20_tight_icaloloose_2j35a_j0_DJMASS500j35_pf_ftf_xe40_cell_xe50_pfopufit_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 4 - 6: 1 + 0: 1 HLT_g20_tight_icaloloose_L1eEM18M: eventCount: 3 stepCounts: @@ -9115,553 +9028,150 @@ HLT_g20_tight_icaloloose_L1eEM18M: 3: 3 4: 3 HLT_g20_tight_icaloloose_j30a_j18a_pf_ftf_xe10_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 4 - 6: 28 + 0: 1 HLT_g20_tight_icaloloose_j30a_j18a_pf_ftf_xe20_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 4 - 6: 28 + 0: 1 HLT_g20_tight_icaloloose_j30a_j18a_pf_ftf_xe25_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 4 - 6: 28 + 0: 1 HLT_g20_tight_icaloloose_j30a_j18a_pf_ftf_xe30_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 4 - 6: 28 + 0: 1 HLT_g20_tight_icaloloose_j30a_j18a_pf_ftf_xe35_cell_xe10_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 4 - 6: 28 + 0: 1 HLT_g20_tight_icaloloose_j30a_j18a_pf_ftf_xe35_cell_xe20_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 4 - 6: 28 + 0: 1 HLT_g20_tight_icaloloose_j30a_j18a_pf_ftf_xe35_cell_xe30_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 4 - 6: 28 + 0: 1 HLT_g20_tight_icaloloose_j30a_j18a_pf_ftf_xe35_cell_xe50_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 4 - 6: 28 + 0: 1 HLT_g20_tight_icaloloose_j30a_j18a_pf_ftf_xe35_cell_xe60_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 4 - 6: 28 + 0: 1 HLT_g20_tight_icaloloose_j30a_j18a_pf_ftf_xe40_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 4 - 6: 28 + 0: 1 HLT_g20_tight_icaloloose_j30a_j18a_pf_ftf_xe50_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 4 - 6: 28 + 0: 1 HLT_g20_tight_icaloloose_j30a_j18a_pf_ftf_xe60_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 4 
- 6: 28 + 0: 1 HLT_g20_tight_icaloloose_j30a_pf_ftf_xe10_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 3 - 6: 9 + 0: 1 HLT_g20_tight_icaloloose_j30a_pf_ftf_xe20_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 3 - 6: 9 + 0: 1 HLT_g20_tight_icaloloose_j30a_pf_ftf_xe25_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 3 - 6: 9 + 0: 1 HLT_g20_tight_icaloloose_j30a_pf_ftf_xe30_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 3 - 6: 9 + 0: 1 HLT_g20_tight_icaloloose_j30a_pf_ftf_xe35_cell_xe10_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 3 - 6: 9 + 0: 1 HLT_g20_tight_icaloloose_j30a_pf_ftf_xe35_cell_xe20_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 3 - 6: 9 + 0: 1 HLT_g20_tight_icaloloose_j30a_pf_ftf_xe35_cell_xe30_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 - stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 - stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 3 - 6: 9 -HLT_g20_tight_icaloloose_j30a_pf_ftf_xe35_cell_xe50_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 - stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 - stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 3 - 6: 9 -HLT_g20_tight_icaloloose_j30a_pf_ftf_xe35_cell_xe60_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 - stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 - stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 3 - 6: 9 -HLT_g20_tight_icaloloose_j30a_pf_ftf_xe40_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 - stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 - stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 3 - 6: 9 -HLT_g20_tight_icaloloose_j30a_pf_ftf_xe50_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 - stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 - stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 3 - 6: 9 -HLT_g20_tight_icaloloose_j30a_pf_ftf_xe60_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 - stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 - stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 3 - 6: 9 -HLT_g20_tight_icaloloose_j35_0eta290_020jvt_bdl1d77_3j35a_j0_DJMASS500j35_pf_ftf_L1EM18VHI_MJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 3 -HLT_g20_tight_icaloloose_j35_0eta290_020jvt_bdl1d77_3j35a_j0_DJMASS500j35_pf_ftf_L1eEM22M_jMJJ-300: + 0: 1 +HLT_g20_tight_icaloloose_j30a_pf_ftf_xe35_cell_xe50_pfopufit_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 3 -HLT_g20_tight_icaloloose_j35_0eta290_020jvt_bgn177_3j35a_j0_DJMASS500j35_pf_ftf_L1EM18VHI_MJJ-300: + 0: 1 
+HLT_g20_tight_icaloloose_j30a_pf_ftf_xe35_cell_xe60_pfopufit_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 3 -HLT_g20_tight_icaloloose_j35_0eta290_020jvt_bgn177_3j35a_j0_DJMASS500j35_pf_ftf_L1eEM22M_jMJJ-300: + 0: 1 +HLT_g20_tight_icaloloose_j30a_pf_ftf_xe40_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 3 -HLT_g20_tight_icaloloose_j35_0eta290_020jvt_bgn277_3j35a_j0_DJMASS500j35_pf_ftf_L1eEM22M_jMJJ-300: + 0: 1 +HLT_g20_tight_icaloloose_j30a_pf_ftf_xe50_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 3 -HLT_g20_tight_icaloloose_j35_j20a_pf_ftf_xe35_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 - stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 4 - 6: 23 -HLT_g20_tight_icaloloose_j35_j25a_j0_DJMASS300j35_pf_ftf_xe25_cell_xe35_pfopufit_L1eEM22M_jMJJ-300: + 0: 1 +HLT_g20_tight_icaloloose_j30a_pf_ftf_xe60_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: eventCount: 0 - stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 5 - 6: 1 -HLT_g20_tight_icaloloose_j35a_j0_DJMASS300j35_pf_ftf_xe25_cell_xe35_pfopufit_L1eEM22M_jMJJ-300: + stepCounts: + 0: 1 + stepFeatures: + 0: 1 +HLT_g20_tight_icaloloose_j35_0eta290_020jvt_bdl1d77_3j35a_j0_DJMASS500j35_pf_ftf_L1EM18VHI_MJJ-300: eventCount: 0 stepCounts: 0: 2 @@ -9676,9 +9186,14 @@ HLT_g20_tight_icaloloose_j35a_j0_DJMASS300j35_pf_ftf_xe25_cell_xe35_pfopufit_L1e 2: 1 3: 1 4: 1 - 5: 4 - 6: 1 -HLT_g20_tight_icaloloose_j35a_j20a_j0_DJMASS300j35_pf_ftf_xe10_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: + 5: 3 +HLT_g20_tight_icaloloose_j35_0eta290_020jvt_bdl1d77_3j35a_j0_DJMASS500j35_pf_ftf_L1eEM22M_jMJJ-300: + eventCount: 0 + stepCounts: + 0: 1 + stepFeatures: + 0: 1 +HLT_g20_tight_icaloloose_j35_0eta290_020jvt_bgn177_3j35a_j0_DJMASS500j35_pf_ftf_L1EM18VHI_MJJ-300: eventCount: 0 stepCounts: 0: 2 @@ -9693,347 +9208,163 @@ HLT_g20_tight_icaloloose_j35a_j20a_j0_DJMASS300j35_pf_ftf_xe10_cell_xe40_pfopufi 2: 1 3: 1 4: 1 - 5: 5 - 6: 1 + 5: 3 +HLT_g20_tight_icaloloose_j35_0eta290_020jvt_bgn177_3j35a_j0_DJMASS500j35_pf_ftf_L1eEM22M_jMJJ-300: + eventCount: 0 + stepCounts: + 0: 1 + stepFeatures: + 0: 1 +HLT_g20_tight_icaloloose_j35_0eta290_020jvt_bgn277_3j35a_j0_DJMASS500j35_pf_ftf_L1eEM22M_jMJJ-300: + eventCount: 0 + stepCounts: + 0: 1 + stepFeatures: + 0: 1 +HLT_g20_tight_icaloloose_j35_j20a_pf_ftf_xe35_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: + eventCount: 0 + stepCounts: + 0: 1 + stepFeatures: + 0: 1 +HLT_g20_tight_icaloloose_j35_j25a_j0_DJMASS300j35_pf_ftf_xe25_cell_xe35_pfopufit_L1eEM22M_jMJJ-300: + eventCount: 0 + stepCounts: + 0: 1 + stepFeatures: + 0: 1 +HLT_g20_tight_icaloloose_j35a_j0_DJMASS300j35_pf_ftf_xe25_cell_xe35_pfopufit_L1eEM22M_jMJJ-300: + eventCount: 0 + stepCounts: + 0: 1 + stepFeatures: + 0: 1 +HLT_g20_tight_icaloloose_j35a_j20a_j0_DJMASS300j35_pf_ftf_xe10_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: + eventCount: 0 + stepCounts: + 0: 1 + stepFeatures: + 0: 1 HLT_g20_tight_icaloloose_j35a_j20a_j0_DJMASS300j35_pf_ftf_xe20_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 5 - 6: 1 + 0: 1 
HLT_g20_tight_icaloloose_j35a_j20a_j0_DJMASS300j35_pf_ftf_xe25_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 5 - 6: 1 + 0: 1 HLT_g20_tight_icaloloose_j35a_j20a_j0_DJMASS300j35_pf_ftf_xe30_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 5 - 6: 1 + 0: 1 HLT_g20_tight_icaloloose_j35a_j20a_j0_DJMASS300j35_pf_ftf_xe35_cell_xe10_pfopufit_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 5 - 6: 1 + 0: 1 HLT_g20_tight_icaloloose_j35a_j20a_j0_DJMASS300j35_pf_ftf_xe35_cell_xe20_pfopufit_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 5 - 6: 1 + 0: 1 HLT_g20_tight_icaloloose_j35a_j20a_j0_DJMASS300j35_pf_ftf_xe35_cell_xe30_pfopufit_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 5 - 6: 1 + 0: 1 HLT_g20_tight_icaloloose_j35a_j20a_j0_DJMASS300j35_pf_ftf_xe35_cell_xe50_pfopufit_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 5 - 6: 1 + 0: 1 HLT_g20_tight_icaloloose_j35a_j20a_j0_DJMASS300j35_pf_ftf_xe35_cell_xe60_pfopufit_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 5 - 6: 1 + 0: 1 HLT_g20_tight_icaloloose_j35a_j20a_j0_DJMASS300j35_pf_ftf_xe40_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 5 - 6: 1 + 0: 1 HLT_g20_tight_icaloloose_j35a_j20a_j0_DJMASS300j35_pf_ftf_xe50_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 5 - 6: 1 + 0: 1 HLT_g20_tight_icaloloose_j35a_j20a_j0_DJMASS300j35_pf_ftf_xe60_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 5 - 6: 1 + 0: 1 HLT_g20_tight_icaloloose_j35a_j20a_j0_DJMASS500j35_pf_ftf_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 3 + 0: 1 HLT_g20_tight_icaloloose_j35a_j25a_j0_DJMASS300j35_pf_ftf_xe25_cell_xe45_pfopufit_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 5 - 6: 1 + 0: 1 HLT_g20_tight_icaloloose_j35a_j25a_j0_DJMASS300j35_pf_ftf_xe30_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: eventCount: 0 - stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + stepCounts: + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 5 - 6: 1 + 0: 1 HLT_g20_tight_icaloloose_j35a_j25a_j0_DJMASS300j35_pf_ftf_xe35_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 5 - 6: 1 + 0: 1 HLT_g20_tight_icaloloose_j35a_j25a_j0_DJMASS300j35_pf_ftf_xe40_cell_xe50_pfopufit_L1eEM22M_jMJJ-300: eventCount: 0 
stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 5 - 6: 1 + 0: 1 HLT_g20_tight_icaloloose_j35a_j25a_pf_ftf_xe35_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 4 - 6: 18 + 0: 1 HLT_g20_tight_icaloloose_j35a_pf_ftf_xe35_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 - 6: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 3 - 6: 6 + 0: 1 HLT_g20_tight_icaloloose_j40_j35a_j20c_j0_DJMASS300j35_pf_ftf_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 4 + 0: 1 HLT_g20_tight_icaloloose_j40_j35a_j25a_j25_j20c_j0_DJMASS300j35_pf_ftf_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 6 + 0: 1 HLT_g20_tight_icaloloose_ringer_L1eEM18M: eventCount: 2 stepCounts: @@ -10051,51 +9382,21 @@ HLT_g20_tight_icaloloose_ringer_L1eEM18M: HLT_g20_tight_icalotight_j35a_j20a_j0_DJMASS300j35_pf_ftf_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 3 + 0: 1 HLT_g20_tight_icalotight_j35a_j20a_j0_DJMASS500j35_pf_ftf_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 3 + 0: 1 HLT_g20_tight_j30a_j25a_pf_ftf_xe35_cell_xe40_pfopufit_L1eEM22M_jMJJ-300: - eventCount: 1 + eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 - 5: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 4 - 5: 21 + 0: 1 HLT_g20_tight_j35_0eta290_020jvt_bdl1d77_3j35a_j0_DJMASS500j35_pf_ftf_L1EM18VHI_MJJ-300: eventCount: 0 stepCounts: @@ -10127,31 +9428,15 @@ HLT_g20_tight_j35_0eta290_020jvt_bgn177_3j35a_j0_DJMASS500j35_pf_ftf_L1EM18VHI_M HLT_g20_tight_j35_0eta290_020jvt_bgn177_3j35a_j0_DJMASS500j35_pf_ftf_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 3 + 0: 1 HLT_g20_tight_j35_0eta290_020jvt_bgn277_3j35a_j0_DJMASS500j35_pf_ftf_L1eEM22M_jMJJ-300: eventCount: 0 stepCounts: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 1 + 0: 1 stepFeatures: - 0: 2 - 1: 1 - 2: 1 - 3: 1 - 4: 3 + 0: 1 HLT_g20_tight_ringer_L1eEM18M: eventCount: 2 stepCounts: @@ -14104,22 +13389,10 @@ HLT_isotrk50_L1XE50: 1: 4 HLT_j0_DJMASS900j50dphi260x200deta_calratiovar150_roiftf_preselj20emf24_L1jMJJ-500-NFF: eventCount: 0 - stepCounts: - 0: 1 - stepFeatures: - 0: 1 HLT_j0_DJMASS900j50dphi260x200deta_calratiovar165_roiftf_preselj20emf18_L1jMJJ-500-NFF: eventCount: 0 - stepCounts: - 0: 1 - stepFeatures: - 0: 1 HLT_j0_DJMASS900j50dphi260x200deta_calratiovar186_roiftf_preselj20emf12_L1jMJJ-500-NFF: eventCount: 0 - stepCounts: - 0: 1 - stepFeatures: - 0: 1 HLT_j0_DJMASS900j50dphi260x200deta_calratiovar59_roiftf_preselj20emf72_L1MJJ-500-NFF: eventCount: 1 stepCounts: @@ -14249,11 +13522,11 @@ HLT_j0_HT1000_L1HT190-J15s5pETA21: stepFeatures: 0: 18 HLT_j0_HT1000_L1HT190-jJ40s5pETA21: - eventCount: 2 + eventCount: 1 stepCounts: - 0: 2 + 0: 1 stepFeatures: - 0: 38 + 0: 18 HLT_j0_HT1000_L1J100: eventCount: 1 stepCounts: @@ -15994,13 +15267,13 @@ HLT_j20_CLEANllp_momemfrac072_calratiovar82_roiftf_preselj20emf60_L1MJJ-500-NFF: 1: 5 2: 1 
HLT_j20_DarkJetPEBTLA_L1HT190-jJ40s5pETA21: - eventCount: 15 + eventCount: 8 stepCounts: - 0: 15 - 1: 15 + 0: 8 + 1: 8 stepFeatures: - 0: 162 - 1: 162 + 0: 75 + 1: 75 HLT_j20_L1HT190-J15s5pETA21: eventCount: 8 stepCounts: @@ -16018,15 +15291,15 @@ HLT_j20_PhysicsTLA_L1HT190-J15s5pETA21: 1: 75 2: 8 HLT_j20_PhysicsTLA_L1HT190-jJ40s5pETA21: - eventCount: 15 + eventCount: 8 stepCounts: - 0: 15 - 1: 15 - 2: 15 + 0: 8 + 1: 8 + 2: 8 stepFeatures: - 0: 162 - 1: 162 - 2: 15 + 0: 75 + 1: 75 + 2: 8 HLT_j20_PhysicsTLA_L1J100: eventCount: 3 stepCounts: @@ -17083,10 +16356,6 @@ HLT_j30_CLEANllp_momemfrac006_calratiormbib_L1jJ160: eventCount: 0 HLT_j30_CLEANllp_momemfrac012_calratiormbib_L1jMJJ-500-NFF: eventCount: 0 - stepCounts: - 0: 1 - stepFeatures: - 0: 1 HLT_j30_CLEANllp_momemfrac012_calratiovar186_roiftf_preselj20emf12_L1jJ160: eventCount: 0 stepCounts: @@ -17097,12 +16366,6 @@ HLT_j30_CLEANllp_momemfrac012_calratiovar186_roiftf_preselj20emf12_L1jJ160: 1: 1 HLT_j30_CLEANllp_momemfrac012_calratiovar186_roiftf_preselj20emf12_L1jMJJ-500-NFF: eventCount: 0 - stepCounts: - 0: 1 - 1: 1 - stepFeatures: - 0: 1 - 1: 1 HLT_j30_CLEANllp_momemfrac012_calratiovar_roiftf_preselj20emf12_L1jJ160: eventCount: 0 stepCounts: @@ -18279,10 +17542,10 @@ HLT_j65a_j45a_2j35a_SHARED_2j35_0eta290_020jvt_bdl1d70_j0_DJMASS1000j50_pf_ftf_p HLT_j65a_j45a_2j35a_SHARED_2j35_0eta290_020jvt_bdl1d70_j0_DJMASS1000j50_pf_ftf_presela60XXa40XX2a25_L1jMJJ-500-NFF: eventCount: 0 stepCounts: - 0: 2 + 0: 1 1: 1 stepFeatures: - 0: 10 + 0: 5 1: 62 2: 1 HLT_j65a_j45a_2j35a_SHARED_2j35_0eta290_020jvt_bgn170_j0_DJMASS1000j50_pf_ftf_presela60XXa40XX2a25_L1MJJ-500-NFF: @@ -18297,19 +17560,19 @@ HLT_j65a_j45a_2j35a_SHARED_2j35_0eta290_020jvt_bgn170_j0_DJMASS1000j50_pf_ftf_pr HLT_j65a_j45a_2j35a_SHARED_2j35_0eta290_020jvt_bgn170_j0_DJMASS1000j50_pf_ftf_presela60XXa40XX2a25_L1jMJJ-500-NFF: eventCount: 0 stepCounts: - 0: 2 + 0: 1 1: 1 stepFeatures: - 0: 10 + 0: 5 1: 62 2: 1 HLT_j65a_j45a_2j35a_SHARED_2j35_0eta290_020jvt_bgn270_j0_DJMASS1000j50_pf_ftf_presela60XXa40XX2a25_L1jMJJ-500-NFF: eventCount: 0 stepCounts: - 0: 2 + 0: 1 1: 1 stepFeatures: - 0: 10 + 0: 5 1: 62 2: 1 ? 
HLT_j65c_020jvt_j40c_020jvt_j25c_020jvt_j20c_020jvt_SHARED_j20c_020jvt_bgn285_pf_ftf_presel2c20XX1c20bgtwo80XX1c20gntau80_L1jJ85p0ETA21_3jJ40p0ETA25 @@ -18558,10 +17821,10 @@ HLT_j70a_j50a_2j35a_SHARED_2j35_0eta290_020jvt_bdl1d70_j0_DJMASS1000j50_pf_ftf_p HLT_j70a_j50a_2j35a_SHARED_2j35_0eta290_020jvt_bdl1d70_j0_DJMASS1000j50_pf_ftf_presela60XXa40XX2a25_L1jMJJ-500-NFF: eventCount: 0 stepCounts: - 0: 2 + 0: 1 1: 1 stepFeatures: - 0: 10 + 0: 5 1: 58 2: 1 HLT_j70a_j50a_2j35a_SHARED_2j35_0eta290_020jvt_bgn170_j0_DJMASS1000j50_pf_ftf_presela60XXa40XX2a25_L1MJJ-500-NFF: @@ -18576,10 +17839,10 @@ HLT_j70a_j50a_2j35a_SHARED_2j35_0eta290_020jvt_bgn170_j0_DJMASS1000j50_pf_ftf_pr HLT_j70a_j50a_2j35a_SHARED_2j35_0eta290_020jvt_bgn170_j0_DJMASS1000j50_pf_ftf_presela60XXa40XX2a25_L1jMJJ-500-NFF: eventCount: 0 stepCounts: - 0: 2 + 0: 1 1: 1 stepFeatures: - 0: 10 + 0: 5 1: 58 2: 1 HLT_j75_0eta290_020jvt_bdl1d60_3j75_pf_ftf_preselj50b85XX3j50_L14J20: @@ -31478,15 +30741,15 @@ HLT_tau25_mediumRNN_tracktwoMVA_tau20_mediumRNN_tracktwoMVA_03dRAB_j70_j50a_j0_D HLT_tau25_mediumRNN_tracktwoMVA_tau20_mediumRNN_tracktwoMVA_03dRAB_j70_j50a_j0_DJMASS900j50_L1jMJJ-500-NFF: eventCount: 0 stepCounts: - 0: 2 - 1: 2 - 2: 2 - 3: 2 + 0: 1 + 1: 1 + 2: 1 + 3: 1 stepFeatures: - 0: 16 - 1: 16 - 2: 16 - 3: 16 + 0: 10 + 1: 10 + 2: 10 + 3: 10 HLT_tau25_mediumRNN_tracktwoMVA_tau25_mediumRNN_tracktwoMVA_03dRAB30_L1DR-TAU20ITAU12I-J25: eventCount: 0 stepCounts: diff --git a/Trigger/TrigValidation/TrigP1Test/share/ref_v1Dev_decodeBS_build.ref b/Trigger/TrigValidation/TrigP1Test/share/ref_v1Dev_decodeBS_build.ref index 4cf2a54fe0bd8327d6621479412c4a4bbc984652..2bbb0f5dffb6de82d21a1e3efc53a7ddaa3f798e 100644 --- a/Trigger/TrigValidation/TrigP1Test/share/ref_v1Dev_decodeBS_build.ref +++ b/Trigger/TrigValidation/TrigP1Test/share/ref_v1Dev_decodeBS_build.ref @@ -52,8 +52,6 @@ HLT_2e5_lhmedium_j70_j50a_j0_DJMASS900j50_L1MJJ-500-NFF: eventCount: 0 HLT_2e5_lhmedium_j70_j50a_j0_DJMASS900j50_L1jMJJ-500-NFF: eventCount: 0 - stepFeatures: - 0: 1 HLT_2e5_lhvloose_L1EM3_bBeeM6000_L1All: eventCount: 0 stepCounts: @@ -2689,12 +2687,6 @@ HLT_e5_lhvloose_j70_j50a_j0_DJMASS1000j50_xe50_tcpufit_L1MJJ-500-NFF: eventCount: 0 HLT_e5_lhvloose_j70_j50a_j0_DJMASS1000j50_xe50_tcpufit_L1jMJJ-500-NFF: eventCount: 0 - stepCounts: - 0: 1 - 1: 1 - stepFeatures: - 0: 1 - 1: 1 HLT_e5_nopid_L1eEM5: eventCount: 18 stepCounts: @@ -7403,24 +7395,12 @@ HLT_j65a_j45a_2j35a_SHARED_2j35_0eta290_020jvt_bdl1d70_j0_DJMASS1000j50_pf_ftf_p eventCount: 0 HLT_j65a_j45a_2j35a_SHARED_2j35_0eta290_020jvt_bdl1d70_j0_DJMASS1000j50_pf_ftf_presela60XXa40XX2a25_L1jMJJ-500-NFF: eventCount: 0 - stepCounts: - 0: 1 - stepFeatures: - 0: 5 HLT_j65a_j45a_2j35a_SHARED_2j35_0eta290_020jvt_bgn170_j0_DJMASS1000j50_pf_ftf_presela60XXa40XX2a25_L1MJJ-500-NFF: eventCount: 0 HLT_j65a_j45a_2j35a_SHARED_2j35_0eta290_020jvt_bgn170_j0_DJMASS1000j50_pf_ftf_presela60XXa40XX2a25_L1jMJJ-500-NFF: eventCount: 0 - stepCounts: - 0: 1 - stepFeatures: - 0: 5 HLT_j65a_j45a_2j35a_SHARED_2j35_0eta290_020jvt_bgn270_j0_DJMASS1000j50_pf_ftf_presela60XXa40XX2a25_L1jMJJ-500-NFF: eventCount: 0 - stepCounts: - 0: 1 - stepFeatures: - 0: 5 ? HLT_j65c_020jvt_j40c_020jvt_j25c_020jvt_j20c_020jvt_SHARED_j20c_020jvt_bgn285_pf_ftf_presel2c20XX1c20bgtwo80XX1c20gntau80_L1jJ85p0ETA21_3jJ40p0ETA25 : eventCount: 0 ? 
HLT_j65c_020jvt_j40c_020jvt_j25c_020jvt_j20c_020jvt_SHARED_j20c_020jvt_bgn285_pf_ftf_presel2c20XX1c20bgtwo82XX1c20gntau80_L1jJ85p0ETA21_3jJ40p0ETA25 @@ -7477,18 +7457,10 @@ HLT_j70a_j50a_2j35a_SHARED_2j35_0eta290_020jvt_bdl1d70_j0_DJMASS1000j50_pf_ftf_p eventCount: 0 HLT_j70a_j50a_2j35a_SHARED_2j35_0eta290_020jvt_bdl1d70_j0_DJMASS1000j50_pf_ftf_presela60XXa40XX2a25_L1jMJJ-500-NFF: eventCount: 0 - stepCounts: - 0: 1 - stepFeatures: - 0: 5 HLT_j70a_j50a_2j35a_SHARED_2j35_0eta290_020jvt_bgn170_j0_DJMASS1000j50_pf_ftf_presela60XXa40XX2a25_L1MJJ-500-NFF: eventCount: 0 HLT_j70a_j50a_2j35a_SHARED_2j35_0eta290_020jvt_bgn170_j0_DJMASS1000j50_pf_ftf_presela60XXa40XX2a25_L1jMJJ-500-NFF: eventCount: 0 - stepCounts: - 0: 1 - stepFeatures: - 0: 5 HLT_j75_0eta290_020jvt_bdl1d60_3j75_pf_ftf_preselj50b85XX3j50_L14J20: eventCount: 0 HLT_j75_0eta290_020jvt_bdl1d60_3j75_pf_ftf_preselj50b85XX3j50_L14jJ50: @@ -11409,16 +11381,6 @@ HLT_tau25_mediumRNN_tracktwoMVA_tau20_mediumRNN_tracktwoMVA_03dRAB_j70_j50a_j0_D eventCount: 0 HLT_tau25_mediumRNN_tracktwoMVA_tau20_mediumRNN_tracktwoMVA_03dRAB_j70_j50a_j0_DJMASS900j50_L1jMJJ-500-NFF: eventCount: 0 - stepCounts: - 0: 1 - 1: 1 - 2: 1 - 3: 1 - stepFeatures: - 0: 5 - 1: 5 - 2: 5 - 3: 5 HLT_tau25_mediumRNN_tracktwoMVA_tau25_mediumRNN_tracktwoMVA_03dRAB30_L1DR-TAU20ITAU12I-J25: eventCount: 0 HLT_tau25_mediumRNN_tracktwoMVA_tau25_mediumRNN_tracktwoMVA_03dRAB30_L1cTAU30M_2cTAU20M_DR-eTAU30eTAU20-jJ55: diff --git a/Trigger/TriggerCommon/TrigEDMConfig/python/TriggerEDMRun3.py b/Trigger/TriggerCommon/TrigEDMConfig/python/TriggerEDMRun3.py index 378a072e1976cd476076c992bc97a9cf17fdddf5..8c312f6f11ba7279be60ea80370699c5618a36ed 100644 --- a/Trigger/TriggerCommon/TrigEDMConfig/python/TriggerEDMRun3.py +++ b/Trigger/TriggerCommon/TrigEDMConfig/python/TriggerEDMRun3.py @@ -20,7 +20,7 @@ __log = logging.getLogger('TriggerEDMRun3Config') # ------------------------------------------------------------ # Additional properties for EDM collections # ------------------------------------------------------------ -from TrigEDMConfig.TriggerEDMDefs import Alias, InViews +from TrigEDMConfig.TriggerEDMDefs import Alias, InViews, allowTruncation # ------------------------------------------------------------ # Lists of variables to be kept in the collections @@ -849,8 +849,8 @@ TriggerHLTListRun3 = [ ('xAOD::MuonAuxContainer#HLT_MuonsCB_RoI_TLAAux.', 'BS PhysicsTLA DarkJetPEBTLA ESD', 'Muon'), # FS tracks - ('xAOD::TrackParticleContainer#HLT_IDTrack_FS_FTF', 'BS PhysicsTLA ESD AODFULL', 'Jet'), - ('xAOD::TrackParticleAuxContainer#HLT_IDTrack_FS_FTFAux.passPFTrackPresel.muonCaloTag.muonScore.ptCone20.etConeCore.trackIso.RErr.EOverP.caloIso.trkPtFraction.tagFakeTrack.tagMuonTrack.tagIsoTrack', 'BS PhysicsTLA ESD AODFULL', 'Jet'), + ('xAOD::TrackParticleContainer#HLT_IDTrack_FS_FTF', 'BS PhysicsTLA ESD AODFULL', 'Jet', [allowTruncation]), + ('xAOD::TrackParticleAuxContainer#HLT_IDTrack_FS_FTFAux.passPFTrackPresel.muonCaloTag.muonScore.ptCone20.etConeCore.trackIso.RErr.EOverP.caloIso.trkPtFraction.tagFakeTrack.tagMuonTrack.tagIsoTrack', 'BS PhysicsTLA ESD AODFULL', 'Jet', [allowTruncation]), # FS vertices ('xAOD::VertexContainer#HLT_IDVertex_FS', 'BS PhysicsTLA ESD AODFULL AODSLIM', 'Jet'), @@ -993,8 +993,8 @@ TriggerHLTListRun3 = [ ('xAOD::TauJetAuxContainer#HLT_TrigTauRecMerged_LRTAux.', 'BS ESD AODFULL AODSLIM', 'Tau'), # tau calo clusters - ('xAOD::CaloClusterContainer#HLT_TopoCaloClustersLC', 'BS ESD AODFULL', 'Tau', [InViews('tauCaloMVAViews')]), - 
('xAOD::CaloClusterTrigAuxContainer#HLT_TopoCaloClustersLCAux.nCells.CENTER_MAG', 'BS ESD AODFULL', 'Tau'), + ('xAOD::CaloClusterContainer#HLT_TopoCaloClustersLC', 'BS ESD AODFULL', 'Tau', [InViews('tauCaloMVAViews'), allowTruncation]), + ('xAOD::CaloClusterTrigAuxContainer#HLT_TopoCaloClustersLCAux.nCells.CENTER_MAG', 'BS ESD AODFULL', 'Tau', [allowTruncation]), # tau tracks ('xAOD::TauTrackContainer#HLT_tautrack_MVA', 'BS ESD AODFULL AODSLIM', 'Tau', [InViews('precMVATauViews')]), diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/L1/Config/TopoAlgoDef.py b/Trigger/TriggerCommon/TriggerMenuMT/python/L1/Config/TopoAlgoDef.py index 151dfa2b5e0fd4b630a2a67be48675bdcafb284d..b3fd46d012a07b3aad1253753a43ce143ef2ef34 100644 --- a/Trigger/TriggerCommon/TriggerMenuMT/python/L1/Config/TopoAlgoDef.py +++ b/Trigger/TriggerCommon/TriggerMenuMT/python/L1/Config/TopoAlgoDef.py @@ -533,15 +533,15 @@ class TopoAlgoDef: # (ATR-8194) L1Topo HT Trigger algoList = [ - {"minHT": 150, "otype" : "jJ", "ocut" : 50, "olist" : "s", "nleading" : 5, "inputwidth": HW.jJetOutputWidthSort, "oeta" : 32}, #HT150-jJ50s5pETA32 - {"minHT": 190, "otype" : "jJ", "ocut" : 40, "olist" : "s", "nleading" : 5, "inputwidth": HW.jJetOutputWidthSort, "oeta" : 21}, #HT190-jJ40s5pETA21 + {"itemNameMinHT": 150, "minHT": 240, "otype" : "jJ", "ocut" : 50, "olist" : "s", "nleading" : 5, "inputwidth": HW.jJetOutputWidthSort, "oeta" : 32}, #HT150-jJ50s5pETA32 + {"itemNameMinHT": 190, "minHT": 300, "otype" : "jJ", "ocut" : 40, "olist" : "s", "nleading" : 5, "inputwidth": HW.jJetOutputWidthSort, "oeta" : 21}, #HT190-jJ40s5pETA21 ] for x in algoList: class d: pass for k in x: setattr (d, k, x[k]) - toponame = "HT%d-%s%s%s%spETA%s" % (d.minHT, d.otype, str(d.ocut), d.olist, str(d.nleading) if d.olist=="s" else "", str(d.oeta)) + toponame = "HT%d-%s%s%s%spETA%s" % (d.itemNameMinHT, d.otype, str(d.ocut), d.olist, str(d.nleading) if d.olist=="s" else "", str(d.oeta)) log.debug("Define %s", toponame) inputList = d.otype + d.olist alg = AlgConf.JetHT( name = toponame, inputs = inputList, outputs = [toponame] ) @@ -1216,22 +1216,22 @@ class TopoAlgoDef: # (ATR-12748) fat jet trigger with Simple Cone algo algoList = [ - {"minHT": 111, "otype" : "CjJ", "ocut" : 40, "olist" : "ab", "nleading" : HW.jJetOutputWidthSelect, "inputwidth": HW.jJetOutputWidthSelect, "oeta" : 26}, #SC111-CjJ40abpETA26 + {"itemNameMinHT": 111, "minHT": 166, "otype" : "CjJ", "ocut" : 40, "olist" : "ab", "nleading" : HW.jJetOutputWidthSelect, "inputwidth": HW.jJetOutputWidthSelect, "oeta" : 26}, #SC111-CjJ40abpETA26 ] for x in algoList: class d: pass for k in x: setattr (d, k, x[k]) - toponame = "SC%d-%s%s%s%spETA%s" % (d.minHT, d.otype, str(d.ocut), d.olist, str(d.nleading) if d.olist=="s" else "", str(d.oeta)) + toponame = "SC%d-%s%s%s%spETA%s" % (d.itemNameMinHT, d.otype, str(d.ocut), d.olist, str(d.nleading) if d.olist=="s" else "", str(d.oeta)) log.debug("Define %s", toponame) inputList = d.otype + d.olist alg = AlgConf.SimpleCone( name = toponame, inputs = inputList, outputs = [toponame] ) alg.addgeneric('InputWidth', d.inputwidth) alg.addgeneric('NumResultBits', 1) - alg.addvariable('MinET', get_threshold_cut(d.otype, d.ocut)*_et_conversion) + alg.addvariable('MinET', d.ocut*_et_conversion) alg.addvariable('MinSumET', d.minHT*_et_conversion) - alg.addvariable('MaxRSqr', 10*10*_dr_conversion*_dr_conversion) + alg.addvariable('MaxRSqr', 10*10*_dr_conversion*_dr_conversion) tm.registerTopoAlgo(alg) # 0INVM9-eEM9ab-eEMab @@ -1583,9 +1583,10 @@ class TopoAlgoDef: 
alg.addvariable('DisambDRSqrMax', d.drcutmax*d.drcutmax*_dr_conversion*_dr_conversion, 0) alg.addvariable('DisambDRSqr', d.disamb*d.disamb*_dr_conversion*_dr_conversion, 0) tm.registerTopoAlgo(alg) + # jINVM + DPHI NFFDphimap = [ - { "minInvm": 400 , "minDphi": 0, "maxDphiList": [26, 24, 22, 20], + { "itemNameMinInvm": 400, "minInvm": 640 , "minDphi": 0, "maxDphiList": [26, 24, 22, 20], "otype1" : "AjJ", "ocut1" : 60, "olist1" : "s", "nleading1" : 6, "inputwidth": HW.jJetOutputWidthSort, "otype2" : "AjJ", "ocut2" : 50, "olist2" : "s", "nleading2" : 6 } ] @@ -1597,7 +1598,7 @@ class TopoAlgoDef: inputList = [d.otype1 + d.olist1, d.otype2 + d.olist1] toponames=[] for maxDphi in d.maxDphiList: - toponames.append ("%iINVM-%iDPHI%i-%s%s%s%s-%s%s%s%s" % (d.minInvm, d.minDphi, maxDphi, + toponames.append ("%iINVM-%iDPHI%i-%s%s%s%s-%s%s%s%s" % (d.itemNameMinInvm, d.minDphi, maxDphi, d.otype1, str(d.ocut1) , d.olist1, str(d.nleading1) if d.olist1=="s" else "", d.otype2, str(d.ocut2) , d.olist2, str(d.nleading2) if d.olist2=="s" else "")) alg = AlgConf.InvariantMassDeltaPhiInclusive2( name = 'jINVM_DPHI', inputs = inputList, outputs = toponames) @@ -1618,7 +1619,7 @@ class TopoAlgoDef: # jINVM_NFF + DPHI NFFDphimap = [ - { "minInvm": 400 , "minDphi": 0, "maxDphiList": [26, 24, 22, 20], + { "itemNameMinInvm": 400, "minInvm": 640 , "minDphi": 0, "maxDphiList": [26, 24, 22, 20], "otype1" : "jJ", "ocut1" : 60, "olist1" : "s", "nleading1" : 6, "inputwidth": HW.jJetOutputWidthSort, "otype2" : "AjJ", "ocut2" : 50, "olist2" : "s", "nleading2" : 6 } ] @@ -1630,7 +1631,7 @@ class TopoAlgoDef: inputList = [d.otype1 + d.olist1, d.otype2 + d.olist1] toponames=[] for maxDphi in d.maxDphiList: - toponames.append ("%iINVM-%iDPHI%i-%s%s%s%s-%s%s%s%s" % (d.minInvm, d.minDphi, maxDphi, + toponames.append ("%iINVM-%iDPHI%i-%s%s%s%s-%s%s%s%s" % (d.itemNameMinInvm, d.minDphi, maxDphi, d.otype1, str(d.ocut1) , d.olist1, str(d.nleading1) if d.olist1=="s" else "", d.otype2, str(d.ocut2) , d.olist2, str(d.nleading2) if d.olist2=="s" else "")) alg = AlgConf.InvariantMassDeltaPhiInclusive2( name = 'jINVM_DPHI_NFF', inputs = inputList, outputs = toponames) @@ -1651,7 +1652,7 @@ class TopoAlgoDef: # CF algoList = [ - { "minInvm": 400, "otype1" : "AjJ", "ocut1": 60, "olist1" : "s", "nleading1" : 6, "inputwidth1": HW.jJetOutputWidthSort, + { "itemNameMinInvm": 400, "minInvm": 640, "otype1" : "AjJ", "ocut1": 60, "olist1" : "s", "nleading1" : 6, "inputwidth1": HW.jJetOutputWidthSort, "otype2" : "AjJ", "ocut2": 50, "olist2" : "s", "nleading2" : 6, "inputwidth2": HW.jJetOutputWidthSort, "applyEtaCut":1, "minEta1": 0 ,"maxEta1": 32 , "minEta2": 30 ,"maxEta2": 49 , }, #400INVM-AjJ60s6pETA32-AjJ50s6p30ETA49 ] @@ -1663,7 +1664,7 @@ class TopoAlgoDef: obj1 = "%s%s%sp%sETA%i" % (d.otype1, str(d.ocut1), d.olist1 + (str(d.nleading1) if d.olist1.find('s')>=0 else ""),str(d.minEta1) if d.minEta1>0 else "", d.maxEta1) obj2 = "-%s%s%sp%sETA%i" % (d.otype2, str(d.ocut2), d.olist2 + (str(d.nleading2) if d.olist2.find('s')>=0 else ""),str(d.minEta2) if d.minEta2>0 else "", d.maxEta2) inputList = [d.otype1 + d.olist1, d.otype2 + d.olist2] - toponame = "%iINVM-%s%s" % (d.minInvm, obj1, obj2) + toponame = "%iINVM-%s%s" % (d.itemNameMinInvm, obj1, obj2) alg = AlgConf.InvariantMassInclusive2( name = toponame, inputs = inputList, outputs = toponame) alg.addgeneric('InputWidth1', d.inputwidth1) alg.addgeneric('InputWidth2', d.inputwidth2) @@ -1685,7 +1686,7 @@ class TopoAlgoDef: # jINVM NFFmap = [ - { "minInvmList": [300,400,500,700] , + { 
"itemNameMinInvmList": [300, 400, 500, 700], "minInvmList": [480, 640, 800, 1100] , "otype1" : "AjJ", "ocut1" : 60, "olist1" : "s", "nleading1" : 6, "inputwidth": HW.jJetOutputWidthSort, "otype2" : "AjJ", "ocut2" : 50, "olist2" : "s", "nleading2" : 6 } ] @@ -1696,7 +1697,7 @@ class TopoAlgoDef: setattr (d, k, x[k]) inputList = [d.otype1 + d.olist1, d.otype2 + d.olist1] toponames=[] - for minInvm in d.minInvmList: + for minInvm in d.itemNameMinInvmList: toponames.append ("%iINVM-%s%s%s%s-%s%s%s%s" % (minInvm, d.otype1, str(d.ocut1) , d.olist1, str(d.nleading1) if d.olist1=="s" else "", d.otype2, str(d.ocut2) , d.olist2, str(d.nleading2) if d.olist2=="s" else "")) @@ -1715,7 +1716,7 @@ class TopoAlgoDef: # jINVM_NFF NFFmap = [ - { "minInvmList": [300,400,500,700] , + { "itemNameMinInvmList": [300, 400, 500, 700], "minInvmList": [480, 640, 800, 1100] , "otype1" : "jJ", "ocut1" : 60, "olist1" : "s", "nleading1" : 6, "inputwidth": HW.jJetOutputWidthSort, "otype2" : "AjJ", "ocut2" : 50, "olist2" : "s", "nleading2" : 6 } ] @@ -1726,7 +1727,7 @@ class TopoAlgoDef: setattr (d, k, x[k]) inputList = [d.otype1 + d.olist1, d.otype2 + d.olist1] toponames=[] - for minInvm in d.minInvmList: + for minInvm in d.itemNameMinInvmList: toponames.append ("%iINVM-%s%s%s%s-%s%s%s%s" % (minInvm, d.otype1, str(d.ocut1) , d.olist1, str(d.nleading1) if d.olist1=="s" else "", d.otype2, str(d.ocut2) , d.olist2, str(d.nleading2) if d.olist2=="s" else ""))