Commit 37d1c7f4 authored by Rafal Bielski, committed by Edward Moyse

TrigUpgradeTest: delete package

parent af0f5c99
Showing 35 additions and 434 deletions
@@ -39,10 +39,6 @@ if TriggerFlags.doCalo:
#topSequence += algo
#from TrigUpgradeTest.TestUtils import L1DecoderTest
#l1DecoderTest=L1DecoderTest()
#topSequence+=l1DecoderTest
from L1Decoder.L1DecoderConfig import mapThresholdToL1RoICollection
......
#!/bin/bash
# art-description: athenaMT trigger test with only calo algorithms
# art-type: build
# art-include: master/Athena
# Skipping art-output which has no effect for build tests.
# If you create a grid version, check art-output in existing grid tests.
export EVENTS=20
export THREADS=1
export SLOTS=1
export JOBOPTION="TrigT2CaloCommon/Calo.py"
# athena.py --imf --evtMax 20 --threads 1 --concurrent-events 1 --filesInput /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/TrigP1Test/data18_13TeV.00360026.physics_EnhancedBias.merge.RAW._lb0151._SFO-1._0001.1 TrigT2CaloCommon/Calo.py
# Skip dumping chain counts because this test doesn't produce the histogram that includes them
export SKIP_CHAIN_DUMP=1
source exec_TrigUpgradeTest_art_athenaMT.sh
source exec_TrigUpgradeTest_art_post.sh
# uncomment the line above and remove this message to restore the test after the job opts are fixed
echo "test disabled temporarily"
#!/bin/bash
# art-description: athenaMT trigger test running only ringer
# art-type: build
# art-include: master/Athena
# Skipping art-output which has no effect for build tests.
# If you create a grid version, check art-output in existing grid tests.
export EVENTS=20
export THREADS=1
export SLOTS=1
export JOBOPTION="TrigT2CaloCommon/CaloRinger.py"
# athena.py --imf --evtMax 20 --threads 1 --concurrent-events 1 --filesInput /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/TrigP1Test/data18_13TeV.00360026.physics_EnhancedBias.merge.RAW._lb0151._SFO-1._0001.1 TrigT2CaloCommon/CaloRinger.py
# Skip dumping chain counts because this test doesn't produce the histogram that includes them
export SKIP_CHAIN_DUMP=1
source exec_TrigUpgradeTest_art_athenaMT.sh
source exec_TrigUpgradeTest_art_post.sh
# uncomment the line above and remove this message to restore the test after the job opts are fixed
echo "test disabled temporarily"
@@ -4,7 +4,6 @@
// Framework includes
// TrigUpgradeTest includes
#include "./ITestHypoTool.h"
namespace HLTTest {
......
@@ -2,7 +2,6 @@
Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
*/
// TrigUpgradeTest includes
#include "TestHypoAlg.h"
namespace HLTTest {
......
/*
Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
*/
// TrigUpgradeTest includes
#include "TestHypoTool.h"
#include "TrigCompositeUtils/HLTIdentifier.h"
......
@@ -2,7 +2,6 @@
Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
*/
// TrigUpgradeTest includes
#include "TestInputMaker.h"
#include <xAODTrigger/TrigCompositeAuxContainer.h>
#include <TrigSteeringEvent/TrigRoiDescriptorCollection.h>
......
@@ -2,7 +2,6 @@
Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
*/
// TrigUpgradeTest includes
#include <iostream>
#include <fstream>
#include <xAODTrigger/TrigCompositeAuxContainer.h>
......
@@ -10,7 +10,7 @@ find_package( requests )
# Install files from the package:
atlas_install_python_modules( python/*.py )
atlas_install_joboptions( share/TrigInDetValidation_*.py )
atlas_install_joboptions( share/TrigInDetValidation_*.py share/Cosmic.py )
atlas_install_runtime( test/TrigInDetValidation_TestConfiguration.xml share/TrigInDetValidation_*.py TIDAbuild )
atlas_install_scripts( scripts/TIDA*.py test/test*.py POST_BUILD_CMD ${ATLAS_FLAKE8} --extend-ignore=ATL902 )
......
@@ -3,6 +3,7 @@
#
doWriteRDOTrigger = False
doWriteBS = False
doEmptyMenu = True
include("TriggerJobOpts/runHLT_standalone.py")
from AthenaCommon.AlgSequence import AlgSequence
......
#!/usr/bin/env python
# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
# art-description: athenaMT trigger test using Cosmic.py job options
# art-type: build
# art-include: master/Athena
# Skipping art-output which has no effect for build tests.
# If you create a grid version, check art-output in existing grid tests.
from TrigValTools.TrigValSteering import Test, ExecStep, CheckSteps
ex = ExecStep.ExecStep()
ex.type = 'athena'
ex.job_options = 'TrigInDetValidation/Cosmic.py'
ex.input = 'data_cos'
ex.threads = 1
ex.max_events = 10
test = Test.Test()
test.art_type = 'build'
test.exec_steps = [ex]
test.check_steps = CheckSteps.default_check_steps(test)
import sys
sys.exit(test.run())
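A hypothetical multi-slot variant of the step above, for illustration only: the type, job_options, input, threads and max_events attributes are taken from the script itself, while the concurrent_events attribute name is assumed by analogy with the threads attribute and the forks/threads/concurrent_events settings mentioned in the test descriptions further down; it is not confirmed by this diff.
import sys
from TrigValTools.TrigValSteering import Test, ExecStep, CheckSteps
ex_mt = ExecStep.ExecStep()
ex_mt.type = 'athena'
ex_mt.job_options = 'TrigInDetValidation/Cosmic.py'
ex_mt.input = 'data_cos'
ex_mt.threads = 2
ex_mt.concurrent_events = 2  # attribute name assumed, not confirmed by this diff
ex_mt.max_events = 10
test_mt = Test.Test()
test_mt.art_type = 'build'
test_mt.exec_steps = [ex_mt]
test_mt.check_steps = CheckSteps.default_check_steps(test_mt)
sys.exit(test_mt.run())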
#!/usr/bin/env python
# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
# art-description: Same as empty_menu test from TrigUpgradeTest, but with athenaHLT
# art-description: Test running L1 result decoding, but no HLT chains
# art-type: build
# art-include: master/Athena
......
#!/usr/bin/env python
# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
# art-description: EmptyMenu test with forks=2, threads=2, concurrent_events=2
# art-description: Test running L1 result decoding, but no HLT chains, with forks=2, threads=2, concurrent_events=2
# art-type: build
# art-include: master/Athena
......
Trigger/TrigValidation/TrigUpgradeTest
# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
# Declare the package name:
atlas_subdir( TrigUpgradeTest )
# Install files from the package:
atlas_install_joboptions( share/*.py )
atlas_install_data( share/*.ref share/*.conf )
atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} )
atlas_install_scripts( test/exec*.sh test/test*.sh )
#!/bin/bash
# art-description: athenaMT trigger test running the new-style job options EmuNewJOTest.py
# art-type: build
# art-include: master/Athena
# Skipping art-output which has no effect for build tests.
# If you create a grid version, check art-output in existing grid tests.
export JOBOPTION="EmuNewJOTest.pkl"
export FROMPICKLE=1
export REGTESTEXP="TrigSignatureMo.*INFO HLT_.*"
# Find the regtest reference installed with the release
export REGTESTREF=`find_data.py TrigUpgradeTest/emu_newjo.ref`
get_files -remove -jo TrigUpgradeTest/EmuNewJOTest.py
echo $(date "+%FT%H:%M %Z")" Generate pickle file ${JOBOPTION}"
python EmuNewJOTest.py &> athena.pickle.log # generate pickle
status=$?
if [ ${status} -ne 0 ]; then
echo "ERROR in configuration generation stage, stopping"
echo "art-result: 1 Configuration"
export ATH_RETURN=1
export JOB_LOG="athena.pickle.log"
else
echo
echo "JOs reading stage finished, launching Athena from pickle file"
echo "art-result: 0 Configuration"
echo
source exec_TrigUpgradeTest_art_athenaMT.sh
# Merge log files for post-processing
JOB_LOG_MERGED="athena.merged.log"
echo "### athena.pickle.log ###" > ${JOB_LOG_MERGED}
cat athena.pickle.log >> ${JOB_LOG_MERGED}
echo "### ${JOB_LOG} ###"
cat ${JOB_LOG} >> ${JOB_LOG_MERGED}
export JOB_LOG=${JOB_LOG_MERGED}
fi
source exec_TrigUpgradeTest_art_post.sh
#!/bin/bash
#
# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
#
# Trigger ART test execution script based on TriggerTest/test/exec_athena_art_trigger_validation.sh
#
### DEFAULTS
if [ -z "${NAME}" ]; then
testname=$(basename $0)
testname=${testname#test_}
testname=${testname%.sh}
export NAME=${testname}
fi
echo $(date "+%FT%H:%M %Z")" Execute Athena test ${NAME}"
if [ -z ${EVENTS} ]; then
export EVENTS="1000"
fi
if [ -z ${SKIPEVENTS} ]; then
export SKIPEVENTS="0"
fi
if [ -z ${JOBOPTION} ]; then
export JOBOPTION="TriggerJobOpts/runHLT_standalone.py"
fi
if [ -z ${JOB_LOG} ]; then
export JOB_LOG="athena.log"
fi
if [ -z ${INPUT} ]; then
export INPUT="run2data"
fi
if [ -z ${THREADS} ]; then
export THREADS="1"
fi
if [ -z ${SLOTS} ]; then
export SLOTS="1"
fi
# Run with PerfMon by default
if [ -z ${DOPERFMON} ] || [ ${DOPERFMON} -ne 0 ]; then
export PERFMONFLAG="--perfmon"
else
export PERFMONFLAG=""
fi
if [ -z ${STDCMATH} ] || [ ${STDCMATH} -eq 0 ]; then
if [ -f ${ATLASMKLLIBDIR_PRELOAD}/libimf.so ]; then
export MATHLIBOPT="--imf"
else
echo "!!! WARNING !!! Intel math library is not available, using standard cmath library instead. This may lead to differences in test results."
export MATHLIBOPT="--stdcmath"
fi
else
export MATHLIBOPT="--stdcmath"
fi
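# Example overrides (illustrative values only), using the variables checked above:
#   export DOPERFMON=0   # run without --perfmon
#   export STDCMATH=1    # force --stdcmath even if libimf.so is available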
###
if [[ $INPUT == "run2data" ]]; then
export DS="/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/TrigP1Test/data18_13TeV.00360026.physics_EnhancedBias.merge.RAW._lb0151._SFO-1._0001.1"
elif [[ $INPUT == "run2mc_ttbar" ]]; then
# ttbar RDO_FTK produced in 21.3
export DS="/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/TriggerTest/valid1.410000.PowhegPythiaEvtGen_P2012_ttbar_hdamp172p5_nonallhad.digit.RDO_FTK.e4993_s3214_r11234_d1505/RDO_FTK.17071950._000065.pool.root.1,/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/TriggerTest/valid1.410000.PowhegPythiaEvtGen_P2012_ttbar_hdamp172p5_nonallhad.digit.RDO_FTK.e4993_s3214_r11234_d1505/RDO_FTK.17071950._000235.pool.root.1"
elif [[ $INPUT == "ttbar" ]]; then
# ttbar RDO produced with serial athena job in master r2019-03-20
export DS="/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/TriggerTest/valid1.410000.PowhegPythiaEvtGen_P2012_ttbar_hdamp172p5_nonallhad.merge.RDO.e4993_s3214_r11315/RDO.17533168._000001.pool.root.1,/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/TriggerTest/valid1.410000.PowhegPythiaEvtGen_P2012_ttbar_hdamp172p5_nonallhad.merge.RDO.e4993_s3214_r11315/RDO.17533168._000002.pool.root.1"
elif [[ $INPUT == "run2data_cosmic" ]]; then
export DS="/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/TrigP1Test/data16_cos.00306147.physics_Main.daq.RAW._lb0494._SFO-4._0001.data"
elif [[ $INPUT == "none" ]]; then
export DS=""
elif [[ -f $INPUT ]]; then
export DS=${INPUT}
else
echo "ERROR: input not defined, cannot run"
echo "art-result: 1"
exit 1
fi
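# Example overrides (illustrative values; the path below is hypothetical):
# INPUT accepts one of the dataset keys above, "none", or a path to an existing file, e.g.
#   export INPUT="run2data_cosmic"
#   export INPUT="/path/to/local/file.RAW.data"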
######################################
# Generate empty PoolFileCatalog.xml - this prevents incorrect handling of crashes on the grid
ART_AVAILABLE=`which art.py >/dev/null 2>&1; echo $?`
if [[ $ART_AVAILABLE == "0" ]]; then
echo "Executing art.py createpoolfile"
art.py createpoolfile
fi
######################################
if [[ ${FROMPICKLE} == "1" ]]; then
echo "Running athena from pickle file ${JOBOPTION} with the command:"
(set -x
athena.py \
${MATHLIBOPT} \
${PERFMONFLAG} \
${ATHENAOPTS} \
${JOBOPTION} >${JOB_LOG} 2>&1
) 2>&1
else
echo "Running athena command:"
(set -x
athena.py \
${MATHLIBOPT} \
${PERFMONFLAG} \
${ATHENAOPTS} \
--threads ${THREADS} \
--concurrent-events ${SLOTS} \
--filesInput "${DS}" \
--evtMax ${EVENTS} \
--skipEvents ${SKIPEVENTS} \
-c "${EXTRA}" \
${JOBOPTION} >${JOB_LOG} 2>&1
) 2>&1
fi
######################################
export ATH_RETURN=$?
echo "art-result: ${ATH_RETURN} ${JOB_LOG%.*}"
echo $(date "+%FT%H:%M %Z")" Done executing Athena test ${NAME} with result ${ATH_RETURN}"
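For reference, the test scripts in this diff configure the executor above purely through environment variables and then source it; a minimal caller sketch, with illustrative values mirroring the Cosmic test at the end of this diff:
export EVENTS=10
export THREADS=1
export SLOTS=1
export INPUT="run2data_cosmic"
export JOBOPTION="TrigUpgradeTest/Cosmic.py"
export EXTRA="doEmptyMenu=True;doWriteBS=False;doWriteRDOTrigger=True;"
#export ATHENAOPTS=" -l DEBUG "   # optional extra athena.py arguments
source exec_TrigUpgradeTest_art_athenaMT.sh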
#!/bin/bash
#
# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
#
# Trigger ART test script with post-exec checks based on TriggerTest/test/exec_art_triggertest_post.sh
#
echo $(date "+%FT%H:%M %Z")" Execute TrigUpgradeTest post processing for test ${NAME}"
### Find the branch name
if [ -n "${AtlasBuildBranch}" ]; then
export BRANCH=${AtlasBuildBranch} # available after asetup
elif [ -n "${gitlabTargetBranch}" ]; then
export BRANCH=${gitlabTargetBranch} # available in CI
else
echo "WARNING Cannot determine the branch name, both variables AtlasBuildBranch and gitlabTargetBranch are empty"
fi
### DEFAULTS
if [ -z ${ATH_RETURN} ]; then
echo "WARNING The env variable ATH_RETURN is not set, assuming 0"
export ATH_RETURN=0
fi
if [ -z ${JOB_LOG} ]; then
export JOB_LOG="athena.log"
fi
if [ -z ${TEST} ]; then
export TEST="TrigUpgradeTest"
fi
if [ -z ${ESDTOCHECK} ]; then
export ESDTOCHECK="ESD.pool.root"
fi
if [ -z ${AODTOCHECK} ]; then
export AODTOCHECK="AOD.pool.root"
fi
if [ -z ${REF_FOLDER} ]; then
# Try eos first
export REF_FOLDER="/eos/atlas/atlascerngroupdisk/data-art/grid-input/${TEST}/ref/${BRANCH}/test_${NAME}"
# If not available, try cvmfs
if [ -d ${REF_FOLDER} ]; then
echo "Using reference directory from eos: ${REF_FOLDER}"
else
export REF_FOLDER="/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/${TEST}/ref/${BRANCH}/test_${NAME}"
if [ -d ${REF_FOLDER} ]; then
echo "Reference from eos unavailable, using one from cvmfs: ${REF_FOLDER}"
else
echo "Reference directory for test ${NAME} not available from eos or cvmfs"
fi
fi
fi
# Disable RegTest by default and enable it only if a regex or a reference file is defined
export DOREGTEST=0
if [ -n "${REGTESTEXP}" ] || [ -n "${REGTESTREF}" ]; then
export DOREGTEST=1
fi
# Note: REGTESTEXP is a GNU grep regexp, not a Perl regexp
if [ -z "${REGTESTEXP}" ]; then
export REGTESTEXP="REGTEST"
fi
# Option to override the regtest reference location
if [ -z "${REGTESTREF}" ]; then
export REGTESTREF=${REF_FOLDER}/athena.regtest
fi
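# RegTest only runs when the caller sets REGTESTEXP and/or REGTESTREF before sourcing
# this script. Example overrides (taken from the EmuNewJOTest script above):
#   export REGTESTEXP="TrigSignatureMo.*INFO HLT_.*"
#   export REGTESTREF=`find_data.py TrigUpgradeTest/emu_newjo.ref`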
### CHECKLOG
# if athena failed and we are running in CI, print the full log to stdout
if [ "${ATH_RETURN}" -ne "0" ] && [ -n "${gitlabTargetBranch}" ]; then
echo "Printing the full ${JOB_LOG}"
cat ${JOB_LOG}
fi
echo $(date "+%FT%H:%M %Z")" Running checklog for errors"
timeout 5m check_log.py --config checklogTriggerTest.conf --errors --showexcludestats ${JOB_LOG} 2>&1 | tee checklog.log
echo "art-result: ${PIPESTATUS[0]} CheckLog"
echo $(date "+%FT%H:%M %Z")" Running checklog for warnings"
timeout 5m check_log.py --config checklogTriggerTest.conf --warnings --showexcludestats ${JOB_LOG} >warnings.log 2>&1
### PERFMON
if [ -f ntuple.pmon.gz ]; then
echo $(date "+%FT%H:%M %Z")" Running perfmon"
timeout 5m perfmon.py -f 0.90 ntuple.pmon.gz >perfmon.log 2>&1
fi
### HISTOGRAM COUNT
if [ -f expert-monitoring.root ]; then
echo $(date "+%FT%H:%M %Z")" Running histSizes"
timeout 5m histSizes.py -t expert-monitoring.root >histSizes.log 2>&1
fi
### MAKE LOG TAIL FILE
export JOB_LOG_TAIL=${JOB_LOG%%.*}.tail.${JOB_LOG#*.}
tail -10000 ${JOB_LOG} > ${JOB_LOG_TAIL}
### REGTEST
if [ ${DOREGTEST} -ne 0 ]; then
REGTESTREF_BASENAME=$(basename -- "${REGTESTREF}")
if [ -z "${REGTESTEXP_EXCLUDE}" ]; then
grep -E "${REGTESTEXP}" ${JOB_LOG} > "${REGTESTREF_BASENAME}"
else
grep -E "${REGTESTEXP}" ${JOB_LOG} | grep -v -E "${REGTESTEXP_EXCLUDE}" > "${REGTESTREF_BASENAME}"
fi
if [ -f ${REGTESTREF} ]; then
echo $(date "+%FT%H:%M %Z")" Running regtest using reference file ${REGTESTREF}"
timeout 5m regtest.pl --inputfile ${REGTESTREF_BASENAME} --reffile ${REGTESTREF} --linematch ".*" 2>&1 | tee regtest.log
echo "art-result: ${PIPESTATUS[0]} RegTest"
else
echo $(date "+%FT%H:%M %Z")" The reference file does not exist: ${REGTESTREF}"
echo "art-result: 999 RegTest"
fi
mv ${REGTESTREF_BASENAME} ${REGTESTREF_BASENAME}.new
fi
### ROOTCOMP
if [ -f ${REF_FOLDER}/expert-monitoring.root ]; then
echo $(date "+%FT%H:%M %Z")" Running rootcomp"
timeout 10m rootcomp.py --skip="TIME_" ${REF_FOLDER}/expert-monitoring.root expert-monitoring.root >rootcompout.log 2>&1
echo "art-result: ${PIPESTATUS[0]} RootComp"
elif [ -f expert-monitoring.root ]; then
echo $(date "+%FT%H:%M %Z")" No reference expert-monitoring.root found in ${REF_FOLDER}"
echo "art-result: 999 RootComp"
else
echo $(date "+%FT%H:%M %Z")" No expert-monitoring.root file and no reference are found - skipping RootComp"
fi
### CHAINDUMP
# SKIP_CHAIN_DUMP=1 skips this step
# Using a temporary workaround to dump HLTChain.txt
if [ -f expert-monitoring.root ] && [ "${SKIP_CHAIN_DUMP:-0}" != "1" ]; then
echo "Running chainDump"
timeout 5m chainDump.py -f expert-monitoring.root --json >ChainDump.log 2>&1
fi
### CHECKFILE
if [ -f ${ESDTOCHECK} ]; then
echo $(date "+%FT%H:%M %Z")" Running CheckFile on ESD"
timeout 10m checkFile.py ${ESDTOCHECK} >${ESDTOCHECK}.checkFile 2>&1
echo "art-result: ${PIPESTATUS[0]} CheckFileESD"
else
echo $(date "+%FT%H:%M %Z")" No ESD file to check"
fi
if [ -f ${AODTOCHECK} ]; then
echo $(date "+%FT%H:%M %Z")" Running CheckFile on AOD"
timeout 10m checkFile.py ${AODTOCHECK} >${AODTOCHECK}.checkFile 2>&1
echo "art-result: ${PIPESTATUS[0]} CheckFileAOD"
echo $(date "+%FT%H:%M %Z")" Running CheckxAOD AOD"
timeout 10m checkxAOD.py ${AODTOCHECK} >${AODTOCHECK}.checkxAOD 2>&1
echo "art-result: ${PIPESTATUS[0]} CheckXAOD"
echo $(date "+%FT%H:%M %Z")" Running checkFileTrigSize_RTT.py AOD"
export TRIGSIZE_LOG=${JOB_LOG%%.*}.TrigEDMSize.${JOB_LOG#*.}
timeout 10m checkFileTrigSize_RTT.py ${AODTOCHECK} > ${TRIGSIZE_LOG} 2>&1
echo "art-result: ${PIPESTATUS[0]} checkFileTrigSize_RTT.py"
else
echo $(date "+%FT%H:%M %Z")" No AOD file to check"
fi
### GENERATE JSON WITH POST-PROCESSING INFORMATION
echo $(date "+%FT%H:%M %Z")" Running trig-test-json.py"
timeout 5m trig-test-json.py
cat extra-results.json && echo
### SUMMARY
echo $(date "+%FT%H:%M %Z")" Files in directory:"
ls -lh
echo $(date "+%FT%H:%M %Z")" Finished TrigUpgradeTest post processing for test ${NAME}"
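Callers tune the checks above through environment variables exported before sourcing the script; a minimal sketch, with illustrative values taken from the calo and EmuNewJOTest scripts earlier in this diff:
export SKIP_CHAIN_DUMP=1                                   # no chain-count histogram produced
export REGTESTEXP="TrigSignatureMo.*INFO HLT_.*"
export REGTESTREF=`find_data.py TrigUpgradeTest/emu_newjo.ref`
source exec_TrigUpgradeTest_art_post.sh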
#!/bin/bash
# art-description: athenaMT trigger test using the Cosmic.py job options
# art-type: build
# art-include: master/Athena
# Skipping art-output which has no effect for build tests.
# If you create a grid version, check art-output in existing grid tests.
export EVENTS=10
export THREADS=1
export SLOTS=1
export JOBOPTION="TrigUpgradeTest/Cosmic.py"
#export EXTRA="isOnline=True;doEmptyMenu=True;doWriteBS=False;doWriteRDOTrigger=True;forceEnableAllChains=True"
#export EXTRA="isOnline=True;doEmptyMenu=True;doWriteBS=False"
export EXTRA="doEmptyMenu=True;doWriteBS=False;doWriteRDOTrigger=True;"
export INPUT="run2data_cosmic"
#export ATHENAOPTS=" -l DEBUG "
# Skip dumping chain counts because this test doesn't produce the histogram that includes them
export SKIP_CHAIN_DUMP=1
source exec_TrigUpgradeTest_art_athenaMT.sh
source exec_TrigUpgradeTest_art_post.sh