diff --git a/Control/CalypsoExample/Digitization/scripts/faser_digi.py b/Control/CalypsoExample/Digitization/scripts/faser_digi.py index 9a1ced5b0ca751c6ff2b975083984c4b9890eee6..72f890a3bd3b7f352f60c66207de15aaa2ec4961 100755 --- a/Control/CalypsoExample/Digitization/scripts/faser_digi.py +++ b/Control/CalypsoExample/Digitization/scripts/faser_digi.py @@ -2,28 +2,32 @@ # # Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration # Run with: -# ./faser_digi.py filepath runtype +# ./faser_digi.py filepath # # filepath - fully qualified path, including url if needed, to the input HITS file # example: "root://eospublic.cern.ch//eos/experiment/faser/sim/GeniePilot/HITS/1/faser.150fbInv.1.001.HITS.pool.root" # -# runtype - MANDATORY flag to specify the data type (TI12OldMC or TI12MC or TestBeamMC). -# Not extracted (yet) from file path for MC data +# Options: +# --geom=runtype - flag to specify the data type (TI12OldMC or TI12MC or TestBeamMC). +# defaults to TI12MC # import sys +import time import argparse -parser = argparse.ArgumentParser(description="Run FASER reconstruction") +a = time.time() + +parser = argparse.ArgumentParser(description="Run FASER digitization") parser.add_argument("file_path", help="Fully qualified path of the raw input file") -parser.add_argument("run_type", nargs="?", default="", - help="Specify run type (if it can't be parsed from path)") +parser.add_argument("-g", "--geom", default="TI12MC", + help="Specify geometry (default: TI12MC, alt: TestBeamMC)") parser.add_argument("-t", "--tag", default="", - help="Specify tag (to append to output filename)") + help="Specify digi tag (to append to output filename)") parser.add_argument("--highCaloGain", action='store_true', help="Use high gain settings for calo PMTs") -parser.add_argument("-n", "--nevents", type=int, default=-1, +parser.add_argument("-n", "--nevts", type=int, default=-1, help="Specify number of events to process (default: all)") parser.add_argument("-v", "--verbose", action='store_true', help="Turn on DEBUG output") @@ -35,22 +39,11 @@ from pathlib import Path filepath=Path(args.file_path) # runtype has been provided -if len(args.run_type) > 0: - runtype=args.run_type - -# Extract runtype from path -# Should be directory above run -# i.e.: TestBeamData/Run-004150/Faser-Physics-004150-00000.raw" -else: - if True or len(filepath.parts) < 3: - print("Can't determine run type from path - specify on command line ") - sys.exit(-1) - -# runtype = filepath.parts[-3] +runtype = args.geom print(f"Starting digitization of {filepath.name} with type {runtype}") -if args.nevents > 0: - print(f"Reconstructing {args.nevents} events by command-line option") +if args.nevts > 0: + print(f"Reconstructing {args.nevts} events by command-line option") # Start digitization @@ -88,21 +81,26 @@ elif runtype == "TI12MC": ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-02" else: - print("Invalid run type found:", runtype) - print("Specify correct type or update list") + print("Invalid geometry type found:", runtype) + print("Specify correct geometry or update list") sys.exit(-1) - # Must use original input string here, as pathlib mangles double // in path names ConfigFlags.Input.Files = [ args.file_path ] filestem = filepath.stem +# Remove any filetype modifier +if filestem[-5:] == "-HITS": + filestem = filestem[:-5] + if len(args.tag) > 0: - filestem += f"-{args.tag}" + print(f"{args.tag} in {filestem}?") + if args.tag in filestem: + print(f"Not adding tag {args.tag} to file {filestem}") + else: + filestem += f"-{args.tag}"
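+# Net effect of the block above: the RDO filename set below becomes <input stem minus "-HITS">[-<digi tag>]-RDO.root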
-# ConfigFlags.addFlag("Output.xAODFileName", f"{filestem}-xAOD.root") ConfigFlags.Output.RDOFileName = f"{filestem}-RDO.root" - # # Play around with this? # ConfigFlags.Concurrency.NumThreads = 2 @@ -138,59 +136,6 @@ acc.merge(CaloWaveformDigitizationCfg(ConfigFlags, CB_norm=calo_norm)) from ScintDigiAlgs.ScintDigiAlgsConfig import ScintWaveformDigitizationCfg acc.merge(ScintWaveformDigitizationCfg(ConfigFlags)) -# from WaveRecAlgs.WaveRecAlgsConfig import WaveformReconstructionCfg -# acc.merge(WaveformReconstructionCfg(ConfigFlags)) - -# # Not ready for primetime -# # from CaloRecAlgs.CaloRecAlgsConfig import CalorimeterReconstructionCfg -# # acc.merge(CalorimeterReconstructionCfg(ConfigFlags)) - -# # Tracker clusters -# from TrackerPrepRawDataFormation.TrackerPrepRawDataFormationConfig import FaserSCT_ClusterizationCfg -# acc.merge(FaserSCT_ClusterizationCfg(ConfigFlags)) - -# # SpacePoints -# from TrackerSpacePointFormation.TrackerSpacePointFormationConfig import TrackerSpacePointFinderCfg -# acc.merge(TrackerSpacePointFinderCfg(ConfigFlags)) - -# # Try Dave's fitter -# from TrackerClusterFit.TrackerClusterFitConfig import ClusterFitAlgCfg -# acc.merge(ClusterFitAlgCfg(ConfigFlags)) - -# -# Configure output -# from OutputStreamAthenaPool.OutputStreamConfig import OutputStreamCfg -# itemList = [ "xAOD::EventInfo#*" -# , "xAOD::EventAuxInfo#*" -# , "xAOD::FaserTriggerData#*" -# , "xAOD::FaserTriggerDataAux#*" -# , "FaserSCT_RDO_Container#*" -# # , "Tracker::FaserSCT_ClusterContainer#*" -# # , "FaserSCT_SpacePointContainer#*" -# # , "FaserSCT_SpacePointOverlapCollection#*" -# # , "TrackCollection#*" -# ] -# acc.merge(OutputStreamCfg(ConfigFlags, "xAOD", itemList)) - -# Waveform reconstruction output -# from WaveRecAlgs.WaveRecAlgsConfig import WaveformReconstructionOutputCfg -# acc.merge(WaveformReconstructionOutputCfg(ConfigFlags)) - -# Calorimeter reconstruction output -# from CaloRecAlgs.CaloRecAlgsConfig import CalorimeterReconstructionOutputCfg -# acc.merge(CalorimeterReconstructionOutputCfg(ConfigFlags)) - -# Check what we have -# print( "Writing out xAOD objects:" ) -# print( acc.getEventAlgo("OutputStreamxAOD").ItemList ) - -# Hack to avoid problem with our use of MC databases when isMC = False -# replicaSvc = acc.getService("DBReplicaSvc") -# replicaSvc.COOLSQLiteVetoPattern = "" -# replicaSvc.UseCOOLSQLite = True -# replicaSvc.UseCOOLFrontier = False -# replicaSvc.UseGeomSQLite = True - # Configure verbosity if args.verbose: acc.foreach_component("*").OutputLevel = VERBOSE @@ -204,4 +149,10 @@ acc.foreach_component("*ClassID*").OutputLevel = INFO acc.getService("MessageSvc").Format = "% F%40W%S%7W%R%T %0W%M" # Execute and finish -sys.exit(int(acc.run(maxEvents=args.nevents).isFailure())) +sc = acc.run(maxEvents=args.nevts) + +b = time.time() +from AthenaCommon.Logging import log +log.info(f"Finish execution in {b-a} seconds") + +sys.exit(not sc.isSuccess()) diff --git a/Control/CalypsoExample/Digitization/scripts/faser_digi_merge.py b/Control/CalypsoExample/Digitization/scripts/faser_digi_merge.py new file mode 100755 index 0000000000000000000000000000000000000000..89fe4fb095846cba78e799b272fafd81302b757f --- /dev/null +++ b/Control/CalypsoExample/Digitization/scripts/faser_digi_merge.py @@ -0,0 +1,235 @@ +#!/usr/bin/env python +# +# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Run with: +# ./faser_digi_merge.py dirpath +# +# dirpath - fully qualified path to the directory with the input HITS files +# example:
"/eos/experiment/faser/sim/tb21/particle_gun/000100/rdo/test" +# +# Options: +# --geom=runtype - flag to specify the data type (TI12OldMC or TI12MC or TestBeamMC). +# default to TI12MC +# +import sys +import time +import argparse + +a = time.time() + +parser = argparse.ArgumentParser(description="Run FASER digitization") + +parser.add_argument("dir_path", + help="Fully qualified path of the input file directory") +parser.add_argument("-p", "--partial", action="store_true", + help="Allow partial merge (default: all specified files required)") +parser.add_argument("-g", "--geom", default="TI12MC", + help="Specify geometry (default: TI12MC, alt: TestBeamMC)") +parser.add_argument("-s", "--slice", type=int, default=0, + help="Specify file slice to produce") +parser.add_argument("-f", "--files", type=int, default=5, + help="Specify number of input files to run in one batch") +parser.add_argument("-t", "--tag", default="", + help="Specify digi tag (to append to output filename)") +parser.add_argument("--highCaloGain", action='store_true', + help="Use high gain settings for calo PMTs") +parser.add_argument("-n", "--nevts", type=int, default=-1, + help="Specify number of events to process (default: all)") +parser.add_argument("-v", "--verbose", action='store_true', + help="Turn on DEBUG output") + +args = parser.parse_args() + +from pathlib import Path + +dirpath = Path(args.dir_path) + +# runtype has been provided +runtype=args.geom + +# Does the directory exist? +if not (dirpath.exists() and dirpath.is_dir()): + print(f"Problem with directory {args.dir_path}") + sys.exit(1) + +# Create segment list +seglist = list(range(args.slice*args.files, (args.slice+1)*args.files)) + +# Now build file list +filelist = [] +dirlist = list(dirpath.glob('FaserMC-*-HITS.root')) +if len(dirlist) == 0: + print(f"No HITS file found in directory {args.dir_path}") + sys.exit(1) + +for seg in seglist: + # Assume these are in numerical order from 0 + if seg >= len(dirlist): + print(f"Requested file segment {seg} but only {len(dirlist)} files found") + if args.partial: + break + else: + sys.exit(1) # Abort this job + + # Check if segment number exists in hits file (this is not perfect) + segstr = f"{seg:05d}" + if segstr not in dirlist[seg]: + print(f"Segment {segstr} not in file {dirlist[seg]}!") + if not args.partial: sys.exit(1) # abort + filelist.append(dirlist[seg]) + +if len(filelist) == 0: + # Asked for range that doesn't exist + print(f"No files found for slice {args.slice} with Nfiles={args.files}") + sys.exit(1) + +# Figure out the file pattern for the output +stem = filelist[0].stem +spl = stem.split('-') +short = spl[1] +run = spl[2] +seglo = int(spl[3]) +# Can be multiple tags +tagstr = '' +for tag in spl[4:]: + if tag == "HITS": break + if len(tagstr) > 0: + tagstr += "-" + tagstr += tag + +# Also find the largest file number +stem = filelist[-1].stem +spl = stem.split('-') +seghi = int(spl[3]) + +# Build output filename +if seglo == 0 and (seghi+1) == len(dirlist): # Full run + outfile = f"FaserMC-{short}-{run}" +elif seglo == seghi: # Single segment + outfile = f"FaserMC-{short}-{run}-{seglo:05}" +else: + outfile = f"FaserMC-{short}-{run}-{seglo:05}-{seghi:05}" + +# Add existing tag +if len(tagstr) > 0: + outfile += f"-{tagstr}" + +# Was a tag requested? 
+if len(args.tag) > 0: + if args.tag in tagstr: + print(f"Not adding tag {args.tag} to file {filelist[0]}") + else: + outfile += f"-{args.tag}" + +# Finish output file +outfile += "-RDO.root" + +print(f"Found files from {seglo} to {seghi}") +print(f"Starting digitization of outfile {outfile} with type {runtype}") +if args.nevts > 0: + print(f"Reconstructing {args.nevts} events by command-line option") + +# Start digitization + +from AthenaConfiguration.ComponentAccumulator import ComponentAccumulator +from AthenaConfiguration.ComponentFactory import CompFactory +from AthenaCommon.Constants import VERBOSE, INFO + +from AthenaCommon.Configurable import Configurable +from CalypsoConfiguration.AllConfigFlags import ConfigFlags + +Configurable.configurableRun3Behavior = True + +# Flags for this job +ConfigFlags.Input.isMC = True # Needed to bypass autoconfig +ConfigFlags.IOVDb.DatabaseInstance = "OFLP200" # Use MC conditions for now + +ConfigFlags.Input.ProjectName = "mc20" +ConfigFlags.GeoModel.Align.Dynamic = False +ConfigFlags.Beam.NumberOfCollisions = 0. +ConfigFlags.Digitization.TruthOutput = True + +# TI12 old geometry +if runtype == "TI12OldMC": + ConfigFlags.GeoModel.FaserVersion = "FASER-01" + ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-01" + +# Testbeam setup +elif runtype == "TestBeamMC" : + ConfigFlags.GeoModel.FaserVersion = "FASER-TB00" + ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-TB00" + +# New TI12 geometry (ugh) +elif runtype == "TI12MC": + ConfigFlags.GeoModel.FaserVersion = "FASERNU-03" + ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-02" + +else: + print("Invalid run type found:", runtype) + print("Specify correct type or update list") + sys.exit(-1) + + +# Try just passing the filelist +if args.dir_path[:22] == '/eos/experiment/faser/': + ConfigFlags.Input.Files = [f"root://eospublic.cern.ch/{str(file)}" for file in filelist] +else: + ConfigFlags.Input.Files = [str(file) for file in filelist] + +ConfigFlags.Output.RDOFileName = outfile + +# +# Play around with this? +# ConfigFlags.Concurrency.NumThreads = 2 +# ConfigFlags.Concurrency.NumConcurrentEvents = 2 +ConfigFlags.lock() + +# +# Configure components +from CalypsoConfiguration.MainServicesConfig import MainServicesCfg +from AthenaPoolCnvSvc.PoolReadConfig import PoolReadCfg +from AthenaPoolCnvSvc.PoolWriteConfig import PoolWriteCfg + +acc = MainServicesCfg(ConfigFlags) +acc.merge(PoolReadCfg(ConfigFlags)) +acc.merge(PoolWriteCfg(ConfigFlags)) + +# +# Needed, or move to MainServicesCfg? +from FaserGeoModel.FaserGeoModelConfig import FaserGeometryCfg +acc.merge(FaserGeometryCfg(ConfigFlags)) + +# Set up algorithms +from FaserSCT_Digitization.FaserSCT_DigitizationConfigNew import FaserSCT_DigitizationCfg +acc.merge(FaserSCT_DigitizationCfg(ConfigFlags)) + +from CaloDigiAlgs.CaloDigiAlgsConfig import CaloWaveformDigitizationCfg +if args.highCaloGain: + calo_norm = 25. +else: + calo_norm = 5. 
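+# Calorimeter waveform normalization passed as CB_norm below: 25 when --highCaloGain is set (high-gain PMT settings, e.g. for muons), 5 otherwise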
+acc.merge(CaloWaveformDigitizationCfg(ConfigFlags, CB_norm=calo_norm)) + +from ScintDigiAlgs.ScintDigiAlgsConfig import ScintWaveformDigitizationCfg +acc.merge(ScintWaveformDigitizationCfg(ConfigFlags)) + +# Configure verbosity +if args.verbose: + acc.foreach_component("*").OutputLevel = VERBOSE + ConfigFlags.dump() + +else: + acc.foreach_component("*").OutputLevel = INFO + +acc.foreach_component("*ClassID*").OutputLevel = INFO + +acc.getService("MessageSvc").Format = "% F%40W%S%7W%R%T %0W%M" + +# Execute and finish +sc = acc.run(maxEvents=args.nevts) + +b = time.time() +from AthenaCommon.Logging import log +log.info(f"Finish execution in {b-a} seconds") + +sys.exit(not sc.isSuccess()) diff --git a/Control/CalypsoExample/Digitization/scripts/submit_faserMDC_digi.sh b/Control/CalypsoExample/Digitization/scripts/submit_faserMDC_digi.sh index 21ac59fbd1f7d717abc307a31216b9b402b43f76..b6b0c80c42a44e1d3731bd92e2fa800aa8036316 100755 --- a/Control/CalypsoExample/Digitization/scripts/submit_faserMDC_digi.sh +++ b/Control/CalypsoExample/Digitization/scripts/submit_faserMDC_digi.sh @@ -186,7 +186,7 @@ then ls -l echo "copy *-RDO.root to $outdest" mkdir -p $outdest - eos cp *-RDO.root $outdest + eos cp *-RDO.root ${outdest}/ || true fi # # Also copy log file @@ -196,12 +196,12 @@ then ls -l echo "copy $logfile to $logdest" mkdir -p $logdest - eos cp $logfile $logdest + eos cp $logfile $logdest/$logfile elif ! [ -z "$outdest" ] then cd .. ls -l echo "copy $logfile to $outdest" mkdir -p $outdest - eos cp $logfile $outdest + eos cp $logfile $outdest/$logfile fi diff --git a/Control/CalypsoExample/Digitization/scripts/submit_faserMDC_digi_merge.sh b/Control/CalypsoExample/Digitization/scripts/submit_faserMDC_digi_merge.sh index e5d45909651a8f02805dc5a29c9b7c16a531c57b..ca2e27fcc6956e1ff180dacc41cf1f9ee2e304bf 100755 --- a/Control/CalypsoExample/Digitization/scripts/submit_faserMDC_digi_merge.sh +++ b/Control/CalypsoExample/Digitization/scripts/submit_faserMDC_digi_merge.sh @@ -209,7 +209,7 @@ then ls -l echo "copy *-RDO.root to $outdest" mkdir -p $outdest - eos cp *-RDO.root $outdest + eos cp *-RDO.root ${outdest}/ || true fi # # Also copy log file @@ -219,12 +219,12 @@ then ls -l echo "copy $logfile to $logdest" mkdir -p $logdest - eos cp $logfile $logdest + eos cp $logfile $logdest/$logfile elif ! [ -z "$outdest" ] then cd .. 
ls -l echo "copy $logfile to $outdest" mkdir -p $outdest - eos cp $logfile $outdest + eos cp $logfile $outdest/$logfile fi diff --git a/Control/CalypsoExample/Digitization/scripts/submit_faser_digi.sh b/Control/CalypsoExample/Digitization/scripts/submit_faser_digi.sh new file mode 100644 index 0000000000000000000000000000000000000000..76abf8172614a17a1f3290f45896f1c6978b5439 --- /dev/null +++ b/Control/CalypsoExample/Digitization/scripts/submit_faser_digi.sh @@ -0,0 +1,223 @@ +#!/bin/bash +# Used with a condor file to submit to vanilla universe +# +# Usage: +# submit_faser_digi.sh [--highGain] filepath [release_directory] [working_directory] +# +# Options: +# --highGain - apply high gain settings to the Calorimeter PMTs (for muons) +# --geom - geometry setting +# --out - specify output location (in EOS) to copy output RDO file +# --log - specify output location (in EOS) for log file +# +# filepath - full file name (with path) +# release_directory - optional path to release install directory (default pwd) +# working_directory - optional path to output directory location (default pwd) +# +# The release directory must already be set up +# (so an unqualified asetup can set up the release properly) +# +# Script will use git describe to find the release tag. +# If this matches sim/s???? or digi/d???? it will be passed to the job +# +#---------------------------------------- +# Keep track of time +SECONDS=0 +# +# Parse command-line options +while [ -n "$1" ] +do + case "$1" in + --highGain) + echo "Applying high gain settings" + highgain=1 + shift;; # This 'eats' the argument + + -g | --geom) + geom="$2"; + shift; + shift;; + + -l | --log) + logdest="$2"; + shift; + shift;; # Must eat 2 options here + + -o | --out) + outdest="$2"; + shift; + shift;; + + --) # End of options + shift; # Eat this + break;; # And stop parsing + + -*) + echo "Unknown option $1" + shift;; + + *) break;; # Not an option, don't shift + esac +done +# +# Parse positional arguments +file_path=${1} +release_directory=${2} +working_directory=${3} +# +# Set defaults if arguments aren't provided +if [ -z "$file_path" ] +then + echo "No file specified!" + echo "Usage: submit_faser_digi.sh [--highGain] file [release dir] [output dir]" + exit 1 +fi +# +if [ -z "$release_directory" ] +then + release_directory=`pwd` +fi +# +if [ -z "$working_directory" ] +then + working_directory=`pwd` +fi +# +starting_directory=`pwd` +# +# Now extract the run number and file stem +# +# First, get the filename +file_name=$(basename "$file_path") +# +# Now split based on '.' to get stem +defaultIFS=$IFS +IFS='.' +read file_stem ext <<< "$file_name" +# +# Finally extract the run number +IFS='-' +# Read the split words into an array based on delimiter +read faser short run_number segment <<< "$file_stem" +# +# Set the IFS delimiter back or else echo doesn't work... +IFS=$defaultIFS +# +# Make output directory if needed +output_directory="$working_directory/$run_number" +mkdir -p "$output_directory" +# +# This magic redirects everything in this script to our log file +logfile="${file_stem}.rdo.log" +exec >& "${output_directory}/${logfile}" +echo `date` - $HOSTNAME +echo "File: $file_name" +echo "Geom: $geom" +echo "Release: $release_directory" +echo "Output: $output_directory" +echo "Starting: $starting_directory" +# +# Set up the release (do this automatically)?
+export ATLAS_LOCAL_ROOT_BASE=/cvmfs/atlas.cern.ch/repo/ATLASLocalRootBase +source ${ATLAS_LOCAL_ROOT_BASE}/user/atlasLocalSetup.sh +# +# Try automatic +# Always go back to the starting directory in case paths are relative +cd "$starting_directory" +cd "$release_directory" +# asetup +# source build/x8*/setup.sh +# +# Do this by hand +asetup --input=calypso/asetup.faser Athena,22.0.49 +source run/setup.sh +#source build/x86*/setup.sh +# +# +# Try to find a release tag +cd calypso +recotag=`git describe` +if [[ "$recotag" == "reco/r"???? ]]; then + tag=`echo "$recotag" | cut -c 6-11` + echo "Found reco tag: $tag" +fi +if [[ "$recotag" == "digi/d"???? ]]; then + tag=`echo "$recotag" | cut -c 6-11` + echo "Found digi tag: $tag" +fi +if [[ "$recotag" == "sim/s"???? ]]; then + tag=`echo "$recotag" | cut -c 5-10` + echo "Found sim tag: $tag" +fi +# +# Move to the run directory +cd "$starting_directory" +cd "$output_directory" +# +# Remove any previous directory if it exists +#if [[ -e "$file_stem" ]]; then +# echo "Remove previous directory $file_stem" +# rm -rf "$file_stem" +#fi +# +# Make run directory +if [[ -e "$file_stem" ]]; then + echo "Directory $file_stem already exists" +else + mkdir "$file_stem" +fi +cd "$file_stem" +# +# Run job +# +if [[ -z "$highgain" ]]; then + gainstr="" +else + gainstr="--highCaloGain" +fi +# +if [[ -z "$geom" ]]; then + geomstr="" +else + geomstr="--geom $geom" +fi +# +if [[ -z "$tag" ]]; then + tagstr="" +else + tagstr="--tag=$tag" +fi +# +faser_digi.py $geomstr $gainstr $tagstr "$file_path" +# +# Print out ending time +date +echo "Job finished after $SECONDS seconds" +# +# Copy output to EOS if desired +export EOS_MGM_URL=root://eospublic.cern.ch +# +if ! [ -z "$outdest" ] +then + ls -l + echo "copy *-RDO.root to $outdest" + mkdir -p $outdest + eos cp *-RDO.root ${outdest}/ || true +fi +# +# Also copy log file +if ! [ -z "$logdest" ] +then + cd .. + ls -l + echo "copy $logfile to $logdest" + mkdir -p $logdest + eos cp $logfile $logdest/$logfile +elif ! [ -z "$outdest" ] +then + cd .. + ls -l + echo "copy $logfile to $outdest" + mkdir -p $outdest + eos cp $logfile $outdest/$logfile +fi diff --git a/Control/CalypsoExample/Digitization/scripts/submit_faser_digi_merge.sh b/Control/CalypsoExample/Digitization/scripts/submit_faser_digi_merge.sh new file mode 100755 index 0000000000000000000000000000000000000000..fd2fe20e2d5f3b5829a6f57dbb83d9a6480ab6b6 --- /dev/null +++ b/Control/CalypsoExample/Digitization/scripts/submit_faser_digi_merge.sh @@ -0,0 +1,245 @@ +#!/bin/bash +# Used with a condor file to submit to vanilla universe +# +# Usage: +# submit_faser_digi_merge.sh [--highGain] dirpath slice nfiles [release_directory] [working_directory] +# +# Options: +# --highGain - apply high gain settings to the Calorimeter PMTs (for muons) +# --geom - geometry setting +# --out - specify output location (in EOS) to copy output RDO file +# --log - specify output location (in EOS) for log file +# +# dirpath - full directory path to HITS files +# slice - ordinal output file number +# nfiles - number of HITS files to process per slice +# release_directory - optional path to release install directory (default pwd) +# working_directory - optional path to output directory location (default pwd) +# +# The release directory must already be set up +# (so an unqualified asetup can set up the release properly) +# +# Script will use git describe to find the release tag. +# If this matches sim/s???? or digi/d????
it will be passed to the job +# +#---------------------------------------- +# Keep track of time +SECONDS=0 +# +# Job options strings +gainstr="" +partialstr="" +geomstr="" +# +# Parse command-line options +while [ -n "$1" ] +do + case "$1" in + --highGain) + echo "Applying high gain settings" + gainstr="--highCaloGain" + shift;; # This 'eats' the argument + + --partial) + echo "Allowing partial merge" + partialstr="--partial" + shift;; + + -g | --geom) + geomstr="--geom $2"; + shift; + shift;; + + -l | --log) + logdest="$2"; + shift; + shift;; # Must eat 2 options here + + -o | --out) + outdest="$2"; + shift; + shift;; + + --) # End of options + shift; # Eat this + break;; # And stop parsing + + -*) + echo "Unknown option $1" + shift;; + + *) break;; # Not an option, don't shift + esac +done +# +# Parse positional arguments +dir_path=${1} +slice=${2} +nfiles=${3} +release_directory=${4} +working_directory=${5} +# +# Set defaults if arguments aren't provided +if [ -z "$dir_path" ] +then + echo "No directory specified!" + echo "Usage: submit_faser_digi_merge.sh directory slice nfiles [release dir] [output dir]" + exit 1 +fi +# +if [ -z "$slice" ] +then + echo "Slice number not specified!" + echo "Usage: submit_faser_digi_merge.sh directory slice nfiles [release dir] [output dir]" + exit 1 +fi +# +if [ -z "$nfiles" ] +then + echo "Files per slice not specified!" + echo "Usage: submit_faser_digi_merge.sh directory slice nfiles [release dir] [output dir]" + exit 1 +fi +# +if [ -z "$release_directory" ] +then + release_directory=`pwd` +fi +# +if [ -z "$working_directory" ] +then + working_directory=`pwd` +fi +# +starting_directory=`pwd` +# +# Now extract the run number and file stem +# +# First, get an example filename +file_name=`ls -1 $dir_path | head -1` +# +# Now split based on '.' to get stem +defaultIFS=$IFS +IFS='.' +read file_stem ext <<< "$file_name" +# +# Finally extract the run number +IFS='-' +# Read the split words into an array based on delimiter +read faser short run_number segment <<< "$file_stem" +# +# Set the IFS delimiter back or else echo doesn't work... +IFS=$defaultIFS +# +# Make output directory if needed +output_directory="$working_directory/$run_number" +mkdir -p "$output_directory" +# +# Need to make up an output name +file_stem="$faser-$short-$run_number-RDO-merge-$slice" +# +# This magic redirects everything in this script to our log file +logfile="${file_stem}.rdo.log" +exec >& "$output_directory/$logfile" +echo `date` - $HOSTNAME +echo "Directory: $dir_path" +echo "Geom: $geomstr" +echo "Slice: $slice" +echo "NFiles: $nfiles" +echo "Release: $release_directory" +echo "Output: $output_directory" +echo "Starting: $starting_directory" +echo "job: $file_stem" +# +# Set up the release (do this automatically)? +export ATLAS_LOCAL_ROOT_BASE=/cvmfs/atlas.cern.ch/repo/ATLASLocalRootBase +source ${ATLAS_LOCAL_ROOT_BASE}/user/atlasLocalSetup.sh +# +# Try automatic +# Always go back to the starting directory in case paths are relative +cd "$starting_directory" +cd "$release_directory" +# asetup +# source build/x8*/setup.sh +# +# Do this by hand +asetup --input=calypso/asetup.faser Athena,22.0.49 +# source build/x86*/setup.sh +source run/setup.sh +# +# +# Try to find a release tag +cd calypso +recotag=`git describe` +if [[ "$recotag" == "reco/r"???? ]]; then + tag=`echo "$recotag" | cut -c 6-11` + echo "Found reco tag: $tag" +fi +if [[ "$recotag" == "digi/d"???? ]]; then + tag=`echo "$recotag" | cut -c 6-11` + echo "Found digi tag: $tag" +fi +if [[ "$recotag" == "sim/s"????
]]; then + tag=`echo "$recotag" | cut -c 5-10` + echo "Found sim tag: $tag" +fi +# +if [[ -z "$tag" ]]; then + tagstr="" +else + tagstr="--tag=$tag" +fi +# +# Move to the run directory +cd "$starting_directory" +cd "$output_directory" +# +# Remove any previous directory if it exists +#if [[ -e "$file_stem" ]]; then +# echo "Remove previous directory $file_stem" +# rm -rf "$file_stem" +#fi +# +# Make run directory +if [[ -e "$file_stem" ]]; then + echo "Directory $file_stem already exists" +else + mkdir "$file_stem" +fi +cd "$file_stem" +# +# Run job +# +faser_digi_merge.py $partialstr $geomstr $gainstr $tagstr --slice $slice --files $nfiles $dir_path +# +# Print out ending time +date +echo "Job finished after $SECONDS seconds" +# +# Copy output to EOS if desired +export EOS_MGM_URL=root://eospublic.cern.ch +# +if ! [ -z "$outdest" ] +then + ls -l + echo "copy *-RDO.root to $outdest" + mkdir -p $outdest + eos cp *-RDO.root ${outdest}/ || true +fi +# +# Also copy log file +if ! [ -z "$logdest" ] +then + cd .. + ls -l + echo "copy $logfile to $logdest" + mkdir -p $logdest + eos cp $logfile $logdest/$logfile +elif ! [ -z "$outdest" ] +then + cd .. + ls -l + echo "copy $logfile to $outdest" + mkdir -p $outdest + eos cp $logfile $outdest/$logfile +fi diff --git a/Control/CalypsoExample/Generation/data/mdc/FaserMC-MDC_PG_muon_fasernu_logE-101307.json b/Control/CalypsoExample/Generation/data/mdc/FaserMC-MDC_PG_muon_fasernu_logE-101307.json new file mode 100644 index 0000000000000000000000000000000000000000..db2954f994dd7c74c9fb30b8e9bda28ac3eea966 --- /dev/null +++ b/Control/CalypsoExample/Generation/data/mdc/FaserMC-MDC_PG_muon_fasernu_logE-101307.json @@ -0,0 +1,14 @@ +{ + "file_length": 5000, + "mass": 105.66, + "maxE": 5000.0, + "minE": 10.0, + "pid": [-13, 13], + "radius": -25.0, + "angle": 0.0006, + "run": 101307, + "sampler": "log", + "segment": 0, + "short": "MDC_PG_muon_fasernu_logE", + "zpos": -3990.0 +} diff --git a/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_100GeV-000200.json b/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_100GeV-000200.json new file mode 100644 index 0000000000000000000000000000000000000000..5296e25aa0b1e77b34d6a5c870aa0b342c446331 --- /dev/null +++ b/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_100GeV-000200.json @@ -0,0 +1,14 @@ +{ + "file_length": 1000, + "geom": "TestBeamMC", + "mass": 0.511, + "maxE": 100.0, + "minE": 100.0, + "pid": [-11, 11], + "radius": -100.0, + "run": 200, + "sampler": "const", + "segment": 0, + "short": "TB_PG_elec_100GeV", + "zpos": -1000.0 +} diff --git a/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_200GeV-000203.json b/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_200GeV-000203.json new file mode 100644 index 0000000000000000000000000000000000000000..d05ca3f6bab5c29e3bee3e792b1fc8274e8749f8 --- /dev/null +++ b/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_200GeV-000203.json @@ -0,0 +1,14 @@ +{ + "file_length": 1000, + "geom": "TestBeamMC", + "mass": 0.511, + "maxE": 200.0, + "minE": 200.0, + "pid": [-11, 11], + "radius": -100.0, + "run": 203, + "sampler": "const", + "segment": 0, + "short": "TB_PG_elec_200GeV", + "zpos": -1000.0 +} diff --git a/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_30GeV-000202.json b/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_30GeV-000202.json new file mode 100644 index 0000000000000000000000000000000000000000..332f05dbadc980e83872766ecff39e05c985f4af --- /dev/null +++
b/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_30GeV-000202.json @@ -0,0 +1,14 @@ +{ + "file_length": 1000, + "geom": "TestBeamMC", + "mass": 0.511, + "maxE": 30.0, + "minE": 30.0, + "pid": [-11, 11], + "radius": -100.0, + "run": 202, + "sampler": "const", + "segment": 0, + "short": "TB_PG_elec_30GeV", + "zpos": -1000.0 +} diff --git a/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_50GeV-000201.json b/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_50GeV-000201.json new file mode 100644 index 0000000000000000000000000000000000000000..6423bea683751eef9fad1a7e07974ba833f590b7 --- /dev/null +++ b/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_50GeV-000201.json @@ -0,0 +1,14 @@ +{ + "file_length": 1000, + "geom": "TestBeamMC", + "mass": 0.511, + "maxE": 50.0, + "minE": 50.0, + "pid": [-11, 11], + "radius": -100.0, + "run": 201, + "sampler": "const", + "segment": 0, + "short": "TB_PG_elec_50GeV", + "zpos": -1000.0 +} diff --git a/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_muon_100GeV-000100.json b/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_muon_100GeV-000100.json new file mode 100644 index 0000000000000000000000000000000000000000..d6c6eee30676d772ab4a9fdd6cee9e46365658cd --- /dev/null +++ b/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_muon_100GeV-000100.json @@ -0,0 +1,14 @@ +{ + "file_length": 2000, + "geom": "TestBeamMC", + "mass": 105.66, + "maxE": 100.0, + "minE": 100.0, + "pid": [-13, 13], + "radius": -100.0, + "run": 100, + "sampler": "const", + "segment": 0, + "short": "TB_PG_muon_100GeV", + "zpos": -1000.0 +} diff --git a/Control/CalypsoExample/Generation/python/faser_parser.py b/Control/CalypsoExample/Generation/python/faser_parser.py new file mode 100644 index 0000000000000000000000000000000000000000..43f98387e7089cfdf258531ec57aeea072b70f53 --- /dev/null +++ b/Control/CalypsoExample/Generation/python/faser_parser.py @@ -0,0 +1,175 @@ +# +# Copyright (C) 2022 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2022 CERN for the benefit of the FASER collaboration +# +# Parser function for particle gun samples +# +def faser_pgparser(): + + import sys + import json + import argparse + + parser = argparse.ArgumentParser(description="Run FASER ParticleGun Simulation") + + parser.add_argument("--conf", action='append', + help="Specify configuration file with default values") + parser.add_argument("--geom", default="TI12MC", + help="Specify geometry to simulate (default: TI12MC, alt: TestBeamMC)") + + parser.add_argument("--run", default=123456, type=int, + help="Run number to generate") + parser.add_argument("--segment", default=00000, type=int, + help="Segment number to generate") + parser.add_argument("--file_length", default=1000, type=int, + help="Total events per file segment") + + parser.add_argument("--short", default="PG_logE", + help="Short description for filename") + parser.add_argument("--tag", default=None, + help="Generator tag (e.g.: g0000)") + + parser.add_argument("--pid", default=[-13, 13], type=int, nargs='*', + help="Specify PDG ID of particle (note plus/minus different) or list (e.g.: --pid -13 13)") + parser.add_argument("--mass", default=105.66, type=float, + help="Specify particle mass (in MeV)") + parser.add_argument("--radius", default=100., type=float, + help="Specify radius (in mm)") + parser.add_argument("--angle", default=0.005, type=float, + help="Specify angular width (in Rad)") + parser.add_argument("--zpos", default=None, type=float,
help="Specify z position of particles (in mm) (helpful to avoid FASERnu)") + + parser.add_argument("--sampler", default="log", + help="Specify energy sampling (log, lin, const)") + parser.add_argument("--minE", default=10., type=float, + help="Minimum energy in GeV (for log or lin sampler)") + parser.add_argument("--maxE", default=1000., type=float, + help="Maximum energy (or constant) in GeV") + + parser.add_argument("--nevts", default=-1, type=int, + help="Number of events to generate (for debugging)") + parser.add_argument("--dump", action='store_true', + help="Write out full configuration") + parser.add_argument("--noexec", action='store_true', + help="Exit after parsing configuration (no execution)") + + pg_args = parser.parse_args() + + # Get defaults + if pg_args.conf is not None: + for conf_fname in pg_args.conf: + with open(conf_fname, 'r') as f: + parser.set_defaults(**json.load(f)) + + # Reload arguments to override config file with command line + pg_args = parser.parse_args() + + # Print out configuration if requested + if pg_args.dump: + tmp_args = vars(pg_args).copy() + del tmp_args['dump'] # Don't dump the dump value + del tmp_args['conf'] # Don't dump the conf file name either + del tmp_args['nevts'] # Debugging, not part of configuration + del tmp_args['noexec'] # Debugging, not part of configuration + print("Configuration:") + print(json.dumps(tmp_args, indent=4, sort_keys=False)) + + if pg_args.noexec: + sys.exit(0) + + # + # Add some derived quantities + # + + # Create the file name also (could add gentag here) + pg_args.outfile = f"FaserMC-{pg_args.short}-{pg_args.run:06}-{pg_args.segment:05}" + + if pg_args.tag: + pg_args.outfile += f"-{pg_args.tag}" + + pg_args.outfile += "-HITS.root" + + return pg_args + +# All done + +# +# Parser function for Foresee samples +# +def faser_fsparser(): + + import sys + import json + import argparse + + parser = argparse.ArgumentParser(description="Run FASER Foresee Simulation") + + parser.add_argument("--conf", action='append', + help="Specify configuration file with default values") + parser.add_argument("--run", default=123456, type=int, + help="Run number to generate") + parser.add_argument("--segment", default=00000, type=int, + help="Segment number to generate") + parser.add_argument("--file_length", default=1000, type=int, + help="Total events per file segement") + + parser.add_argument("--model", help="Model name") + parser.add_argument("--model_path", help="Path to model phase space file") + + parser.add_argument("--short", default="PG_logE", + help="Short description for filename") + parser.add_argument("--tag", default=None, + help="Generator tag (g0000)") + + parser.add_argument("--pid", default=[-13, 13], type=int, nargs=2, + help="Specify PDG ID of daughter particles") + parser.add_argument("--mass", default=105.66, type=float, + help="Specify particle mass (in MeV)") + + parser.add_argument("--nevts", default=-1, type=int, + help="Number of events to generate (for debugging)") + parser.add_argument("--dump", action='store_true', + help="Write out full configuration") + parser.add_argument("--noexec", action='store_true', + help="Exit after parsing configuration (no execution)") + + fs_args = parser.parse_args() + + # Get defaults + if fs_args.conf is not None: + for conf_fname in fs_args.conf: + with open(conf_fname, 'r') as f: + parser.set_defaults(**json.load(f)) + + # Reload arguments to override config file with command line + fs_args = parser.parse_args() + + # Print out configuration if requested + if 
fs_args.dump: + tmp_args = vars(fs_args).copy() + del tmp_args['dump'] # Don't dump the dump value + del tmp_args['conf'] # Don't dump the conf file name either + del tmp_args['nevts'] # Debugging, not part of configuration + del tmp_args['noexec'] # Debugging, not part of configuration + print("Configuration:") + print(json.dumps(tmp_args, indent=4, sort_keys=False)) + + if fs_args.noexec: + sys.exit(0) + + # + # Add some derived quantities + # + + # Create the file name also (could add gentag here) + fs_args.outfile = f"FaserMC-{fs_args.short}-{fs_args.run:06}-{fs_args.segment:05}" + + if fs_args.tag: + fs_args.outfile += f"-{fs_args.tag}" + + fs_args.outfile += "-HITS.root" + + return fs_args + +# All done diff --git a/Control/CalypsoExample/Generation/scripts/faserMDC_foresee.py b/Control/CalypsoExample/Generation/scripts/faserMDC_foresee.py index b48bbf861ed4bbac6e0a6fb6cc9fd95da5049138..f16b6a40a0413278097824bb7af63b51811eaa9c 100755 --- a/Control/CalypsoExample/Generation/scripts/faserMDC_foresee.py +++ b/Control/CalypsoExample/Generation/scripts/faserMDC_foresee.py @@ -124,7 +124,7 @@ if __name__ == '__main__': # # MDC geometry configuration # - detectors = ['Veto', 'VetoNu', 'Preshower', 'FaserSCT', 'Ecal', 'Trigger', 'Dipole', 'Emulsion'] + detectors = ['Veto', 'VetoNu', 'Preshower', 'FaserSCT', 'Ecal', 'Trigger', 'Dipole', 'Emulsion', 'Trench'] # # Setup detector flags # diff --git a/Control/CalypsoExample/Generation/scripts/faserMDC_particlegun.py b/Control/CalypsoExample/Generation/scripts/faserMDC_particlegun.py index 12fe153bf0ccc712c8b72992bf27a60260b84bc4..98975a548601209a81b932c7cbbbe03da7184035 100755 --- a/Control/CalypsoExample/Generation/scripts/faserMDC_particlegun.py +++ b/Control/CalypsoExample/Generation/scripts/faserMDC_particlegun.py @@ -135,7 +135,7 @@ if __name__ == '__main__': # # MDC geometry configuration # - detectors = ['Veto', 'VetoNu', 'Preshower', 'FaserSCT', 'Ecal', 'Trigger', 'Dipole', 'Emulsion'] + detectors = ['Veto', 'VetoNu', 'Preshower', 'FaserSCT', 'Ecal', 'Trigger', 'Dipole', 'Emulsion', 'Trench'] # # Setup detector flags # diff --git a/Control/CalypsoExample/Generation/scripts/faser_particlegun.py b/Control/CalypsoExample/Generation/scripts/faser_particlegun.py index ee38005df79c3f6aab97b5c800f3949521110a02..6abdfdd4a8c8ae18547462b68532bd51cc4a9d71 100755 --- a/Control/CalypsoExample/Generation/scripts/faser_particlegun.py +++ b/Control/CalypsoExample/Generation/scripts/faser_particlegun.py @@ -2,27 +2,36 @@ """ Produce particle gun samples Derived from G4FaserAlgConfigNew -This is a general low-level script, -although this could be useful for testing. 
Usage: -faser_particlegun.py <options> - -Control using command-line options: ---evtMax=1000 ---skipEvt=1000 - -Output.HITSFileName=<name> -Sim.Gun='{"pid" : 11, "z": -1500.}' +faser_particlegun.py --conf=<config_file> Copyright (C) 2002-2021 CERN for the benefit of the ATLAS and FASER collaborations """ if __name__ == '__main__': + import sys import time a = time.time() # +# Parse command-line options +# + from Generation.faser_parser import faser_pgparser + args = faser_pgparser() +# +# Figure out events to run and skip +# + nskipped = args.segment*args.file_length + if args.nevts > 0: + nevents = args.nevts + else: + nevents = args.file_length +# +# Print out what we are doing +# + print(f"Generating {nevents} events into file {args.outfile}") +# # Set up logging and config behaviour # from AthenaCommon.Logging import log @@ -34,21 +43,22 @@ if __name__ == '__main__': # Import and set config flags # from CalypsoConfiguration.AllConfigFlags import ConfigFlags - ConfigFlags.Exec.MaxEvents = 10 # can be overridden from command line with --evtMax=<number> - ConfigFlags.Exec.SkipEvents = 0 # can be overridden from command line with --skipEvt=<number> + ConfigFlags.Exec.MaxEvents = nevents + ConfigFlags.Exec.SkipEvents = nskipped from AthenaConfiguration.Enums import ProductionStep ConfigFlags.Common.ProductionStep = ProductionStep.Simulation # # All these must be specified to avoid auto-configuration # - ConfigFlags.Input.RunNumber = [12345] #Isn't updating - todo: investigate + ConfigFlags.Input.RunNumber = [args.run] ConfigFlags.Input.OverrideRunNumber = True - ConfigFlags.Input.LumiBlockNumber = [1] + ConfigFlags.Input.LumiBlockNumber = [(args.segment+1)] ConfigFlags.Input.isMC = True + ConfigFlags.IOVDb.DatabaseInstance = "OFLP200" # Use MC conditions # # Output file name # - ConfigFlags.Output.HITSFileName = "my.HITS.pool.root" # can be overridden from command line with Output.HITSFileName=<name> + ConfigFlags.Output.HITSFileName = args.outfile # # Sim ConfigFlags # @@ -59,46 +69,86 @@ if __name__ == '__main__': ConfigFlags.addFlag("Sim.Gun",{"Generator" : "SingleParticle"}) # Property bag for particle gun keyword:argument pairs ConfigFlags.addFlag("Sim.Beam.xangle", 0) # Potential beam crossing angles ConfigFlags.addFlag("Sim.Beam.yangle", 0) - - ConfigFlags.GeoModel.FaserVersion = "FASERNU-02" # Geometry set-up - ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-02" # Conditions set-up - ConfigFlags.addFlag("Input.InitialTimeStamp", 0) # To avoid autoconfig + ConfigFlags.addFlag("Sim.Beam.xshift", 0) # Potential beam shift + ConfigFlags.addFlag("Sim.Beam.yshift", 0) + + if args.geom == "TI12MC": + # 2022 TI12 geometry + ConfigFlags.GeoModel.FaserVersion = "FASERNU-02" # Geometry set-up + ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-02" # Conditions set-up + # TI12 detectors + detectors = ['Veto', 'VetoNu', 'Preshower', 'FaserSCT', 'Ecal', 'Trigger', + 'Dipole', 'Emulsion', 'Trench'] + + elif args.geom == "TestBeamMC": + # Define 2021 test beam geometry + ConfigFlags.GeoModel.FaserVersion = "FASER-TB00" # Geometry set-up + ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-TB00" # Conditions set-up + # Testbeam detectors (trigger layers are actually veto counters) + detectors = ['Veto', 'Preshower', 'FaserSCT', 'Ecal'] + + else: + print(f"Unknown geometry {args.geom}!") + sys.exit(1) + + ConfigFlags.addFlag("Input.InitialTimeStamp", 0) # To avoid autoconfig ConfigFlags.GeoModel.Align.Dynamic = False - # # Preset particle gun parameters # - from math import atan + import ParticleGun as PG from
AthenaCommon.SystemOfUnits import GeV, TeV, cm, m from AthenaCommon.PhysicalConstants import pi - # 11 - electron, 13 - muon, 22 - photon - import ParticleGun as PG - ConfigFlags.Sim.Gun = { - "Generator" : "SingleParticle", "pid" : 13, - "energy" : PG.LogSampler(10*GeV, 1*TeV), - "theta" : PG.GaussianSampler(0, atan((10*cm)/(7*m)), oneside = True), - "phi" : [0, 2*pi], "mass" : 0.511, "radius" : -10*cm, #"z": -2.0*m, - "randomSeed" : 12345} - -# -# Command-line overrides -# -# These have the format: Sim.Gun='{"pid" : 11}' -# Filename: Output.HITSFileName="test.muon.001.root" -# Also number of events: --evtMax 100 -# Starting at z = -1.5m (-1500.) will miss the veto (for electrons) - - import sys - ConfigFlags.fillFromArgs(sys.argv[1:]) - + if isinstance(args.pid, list): + # Note args.pid is a list, must make this a set for ParticleGun + pidarg = set(args.pid) + else: + # Just pass a single value + pidarg = args.pid + + print(f"Using pid: {args.pid} => {pidarg}") + + # Create the simgun dictionary + # Negative radius gives uniform sampling + # Positive radius gives Gaussian sampling + sg_dict = { + "Generator" : "SingleParticle", + "pid" : pidarg, "mass" : args.mass, + "theta" : PG.GaussianSampler(0, args.angle, oneside = True), + "phi" : [0, 2*pi], "radius" : args.radius, + "randomSeed" : args.outfile + } + + # -1000 is safely upstream of detector (to be checked) + # Note zpos is in mm! + if args.zpos: + sg_dict["z"] = args.zpos + + # Determine energy sampling + if args.sampler == "lin": + sg_dict["energy"] = PG.UniformSampler(args.minE*GeV, args.maxE*GeV) + elif args.sampler == "log": + sg_dict["energy"] = PG.LogSampler(args.minE*GeV, args.maxE*GeV) + elif args.sampler == "const": + sg_dict["energy"] = PG.ConstSampler(args.maxE*GeV) + else: + print(f"Sampler {args.sampler} not known!") + sys.exit(1) + + # Pass this in one go to ConfigFlags + ConfigFlags.Sim.Gun = sg_dict + + doShiftLOS = (ConfigFlags.Sim.Beam.xangle or ConfigFlags.Sim.Beam.yangle or + ConfigFlags.Sim.Beam.xshift or ConfigFlags.Sim.Beam.yshift) + + if doShiftLOS: + pgConfig = ConfigFlags.Sim.Gun + pgConfig["McEventKey"] = "BeamTruthEvent_ATLASCoord" + ConfigFlags.Sim.Gun = pgConfig # # By being a little clever, we can steer the geometry setup from the command line using GeoModel.FaserVersion # -# MDC configuration -# - detectors = ['Veto', 'Preshower', 'FaserSCT', 'Ecal', 'Trigger', 'Dipole', 'Emulsion'] -# # Setup detector flags # from CalypsoConfiguration.DetectorConfigFlags import setupDetectorsFromList @@ -134,20 +184,22 @@ if __name__ == '__main__': # Shift LOS # - if ConfigFlags.Sim.Beam.xangle or ConfigFlags.Sim.Beam.yangle: - MCEventKey = "BeamTruthEventShifted" + if doShiftLOS: import McParticleEvent.Pythonizations from GeneratorUtils.ShiftLOSConfig import ShiftLOSCfg - cfg.merge(ShiftLOSCfg(ConfigFlags, OutputMCEventKey = MCEventKey, - xcross = ConfigFlags.Sim.Beam.xangle, ycross = ConfigFlags.Sim.Beam.yangle)) - else: - MCEventKey = "BeamTruthEvent" + + cfg.merge(ShiftLOSCfg(ConfigFlags, + xcross = ConfigFlags.Sim.Beam.xangle, + ycross = ConfigFlags.Sim.Beam.yangle, + xshift = ConfigFlags.Sim.Beam.xshift, + yshift = ConfigFlags.Sim.Beam.yshift)) + # # Add the G4FaserAlg # from G4FaserAlg.G4FaserAlgConfigNew import G4FaserAlgCfg - cfg.merge(G4FaserAlgCfg(ConfigFlags, InputTruthCollection = MCEventKey)) + cfg.merge(G4FaserAlgCfg(ConfigFlags)) # # Dump config # diff --git a/Control/CalypsoExample/Generation/scripts/submit_faserMDC_foresee.sh b/Control/CalypsoExample/Generation/scripts/submit_faserMDC_foresee.sh 
index 723ffb6d50b0243878b261639cc1bab461b7da97..94d9ab8b85e2dbf30498662178e599a519641951 100755 --- a/Control/CalypsoExample/Generation/scripts/submit_faserMDC_foresee.sh +++ b/Control/CalypsoExample/Generation/scripts/submit_faserMDC_foresee.sh @@ -202,7 +202,7 @@ then ls -l echo "copy *-HITS.root to $outdest" mkdir -p $outdest - eos cp *-HITS.root $outdest + eos cp *-HITS.root ${outdest}/ || true fi # # Also copy log file @@ -212,12 +212,12 @@ then ls -l echo "copy $logfile to $logdest" mkdir -p $logdest - eos cp $logfile $logdest + eos cp $logfile $logdest/$logfile elif ! [ -z "$outdest" ] then cd .. ls -l echo "copy $logffile to $outdest" mkdir -p $outdest - eos cp $logfile $outdest + eos cp $logfile $outdest/$logfile fi diff --git a/Control/CalypsoExample/Generation/scripts/submit_faserMDC_particlegun.sh b/Control/CalypsoExample/Generation/scripts/submit_faserMDC_particlegun.sh index 729d75c751e33146a2af334315392d3b16692106..5d704e9a51445d2ae408512c4eb0df83c71a8b39 100755 --- a/Control/CalypsoExample/Generation/scripts/submit_faserMDC_particlegun.sh +++ b/Control/CalypsoExample/Generation/scripts/submit_faserMDC_particlegun.sh @@ -200,7 +200,7 @@ then ls -l echo "copy *-HITS.root to $outdest" mkdir -p $outdest - eos cp *-HITS.root $outdest + eos cp *-HITS.root ${outdest}/ || true fi # # Also copy log file @@ -210,12 +210,12 @@ then ls -l echo "copy $logfile to $logdest" mkdir -p $logdest - eos cp $logfile $logdest + eos cp $logfile $logdest/$logfile elif ! [ -z "$outdest" ] then cd .. ls -l echo "copy $logfile to $outdest" mkdir -p $outdest - eos cp $logfile $outdest + eos cp $logfile $outdest/$logfile fi diff --git a/Control/CalypsoExample/Generation/scripts/submit_faser_particlegun.sh b/Control/CalypsoExample/Generation/scripts/submit_faser_particlegun.sh new file mode 100755 index 0000000000000000000000000000000000000000..ec13600f7a94ce8f1958652f83a6e36f72b4563f --- /dev/null +++ b/Control/CalypsoExample/Generation/scripts/submit_faser_particlegun.sh @@ -0,0 +1,221 @@ +#!/bin/bash +# Used with a condor file to submit to vanilla universe +# +# Usage: +# submit_faser_particlegun.sh config_file segment [release_directory] [working_directory] +# +# Options: +# --out - specify output location (in EOS) to copy output HITS file +# --log - specify output location (in EOS) for log file +# +# config_file - full file name (with path) +# segment - segment number (file segment) +# release_directory - optional path to release install directory (default pwd) +# working_directory - optional path to output directory location (default pwd) +# +# Afterwards, the output file will be copied to the directory specified in working_directory +# +# The release directory must already be set up +# (so an unqualified asetup can set up the release properly) +# +# Script will use git describe to find the release tag. +# If this matches gen/g???? or sim/s???? 
it will be passed to the job +# +#---------------------------------------- +# Keep track of time +SECONDS=0 +# +# Parse options +while [ -n "$1" ] +do + case "$1" in + -l | --log) + logdest="$2"; + shift; + shift;; # Must eat 2 options here + + -o | --out) + outdest="$2"; + shift; + shift;; + + --) # End of options + shift; # Eat this + break;; # And stop parsing + + -*) + echo "Unknown option $1" + shift;; + + *) break;; # Not an option, don't shift + esac +done +# +# Parse positional arguments +config_path=${1} +segment=${2} +release_directory=${3} +working_directory=${4} +# +# Set defaults if arguments aren't provided +if [ -z "$config_path" ]; then + echo "No config_path specified!" + echo "Usage: submit_faser_particlegun.sh config_file segment [release dir] [output dir]" + exit 1 +fi +# Check if relative path (only works when run interactively) +if ! [[ ${config_path::1} == "/" ]]; then + echo "config_path should be absolute!" + config_path=`pwd`/${1} + echo "Using: $config_path" +fi +# +if [ -z "$segment" ]; then + segment=0 +fi +# +if [ -z "$release_directory" ]; then + release_directory=`pwd` +fi +# +if [ -z "$working_directory" ]; then + working_directory=`pwd` +fi +# +# Apply padding to segment number +printf -v seg_str "%05d" $segment +# +starting_directory=`pwd` +# +# Now extract the file stem +# +# First, get the filename +config_file=$(basename "$config_path") +# +# Now split based on '.' to get stem +defaultIFS=$IFS +IFS='.' +read config_file_stem ext <<< "$config_file" +# +# Try to find the run number +IFS='-' +# Read the split words into an array based on delimiter +read faser short run_number <<< "$config_file_stem" +# +# Set the IFS delimiter back or else echo doesn't work... +IFS=$defaultIFS +# +# Check if we found a number, use full config name if not +output_directory="$working_directory/${run_number}" +re='^[0-9]+$' +if ! [[ $run_number =~ $re ]] ; then + # Not a number... + output_directory="$working_directory/${config_file_stem}" +fi +# +# Make output directory if needed +mkdir -p "$output_directory" +# +# This magic redirects everything in this script to our log file +logfile="${config_file_stem}-${seg_str}.gen.log" +exec >& "$output_directory/${logfile}" +echo `date` - $HOSTNAME +echo "File: $config_file" +echo "Segment: $seg_str" +echo "Release: $release_directory" +echo "Output: $output_directory" +echo "Starting: $starting_directory" +# +# Set up the release (do this automatically)? +export ATLAS_LOCAL_ROOT_BASE=/cvmfs/atlas.cern.ch/repo/ATLASLocalRootBase +source ${ATLAS_LOCAL_ROOT_BASE}/user/atlasLocalSetup.sh +# +# Try automatic +# Always go back to the starting directory in case paths are relative +cd "$starting_directory" +cd "$release_directory" +# This doesn't seem to work, as we need the --input argument +#asetup +#source build/x8*/setup.sh +# +# Do this by hand +asetup --input=calypso/asetup.faser Athena,22.0.49 +source build/x86*/setup.sh +# +# +# Try to find a release tag +cd calypso +gentag=`git describe` +if [[ "$gentag" == "gen/g"???? ]]; then + tag=`echo "$gentag" | cut -c 5-10` + echo "Found gen tag: $tag" +fi +if [[ "$gentag" == "sim/s"???? ]]; then + tag=`echo "$gentag" | cut -c 5-10` + echo "Found sim tag: $tag" +fi +if [[ "$gentag" == "digi/d"???? ]]; then + tag=`echo "$gentag" | cut -c 6-11` + echo "Found digi tag: $tag" +fi +if [[ "$gentag" == "reco/r"????
]]; then + tag=`echo "$gentag" | cut -c 6-11` + echo "Found reco tag: $tag" +fi +# +# Move to the run directory +cd "$starting_directory" +cd "$output_directory" +# +# Remove any previous directory if it exists +#if [[ -e "$file_stem" ]]; then +# echo "Remove previous directory $file_stem" +# rm -rf "$file_stem" +#fi +# +# Make run directory +if [[ -e "${config_file_stem}-${seg_str}" ]]; then + echo "Directory ${config_file_stem}-${seg_str} already exists" +else + mkdir "${config_file_stem}-${seg_str}" +fi +cd "${config_file_stem}-${seg_str}" +# +# Run job +if [[ -z "$tag" ]]; then + faser_particlegun.py "--conf=$config_path" "--segment=$seg_str" +else + faser_particlegun.py "--conf=$config_path" "--segment=$seg_str" "--tag=$tag" +fi +# +# Print out ending time +date +echo "Job finished after $SECONDS seconds" +# +# Copy output to EOS if desired +export EOS_MGM_URL=root://eospublic.cern.ch +# +if ! [ -z "$outdest" ] +then + ls -l + echo "copy *-HITS.root to $outdest" + mkdir -p $outdest + eos cp *-HITS.root ${outdest}/ || true +fi +# +# Also copy log file +if ! [ -z "$logdest" ] +then + cd .. + ls -l + echo "copy $logfile to $logdest" + mkdir -p $logdest + eos cp $logfile $logdest/$logfile +elif ! [ -z "$outdest" ] +then + cd .. + ls -l + echo "copy $logfile to $outdest" + mkdir -p $outdest + eos cp $logfile $outdest/$logfile +fi diff --git a/Control/CalypsoExample/Reconstruction/CMakeLists.txt b/Control/CalypsoExample/Reconstruction/CMakeLists.txt index 5dbb5a46c9943df6d18019963f0387aa3c608c9b..23af9627f673516d4e78d17a75596770ca6e3b4b 100644 --- a/Control/CalypsoExample/Reconstruction/CMakeLists.txt +++ b/Control/CalypsoExample/Reconstruction/CMakeLists.txt @@ -18,16 +18,15 @@ atlas_install_python_modules( python/*.py ) atlas_install_scripts( scripts/*.sh scripts/*.py ) atlas_add_test( ProdRecoTI12 - SCRIPT scripts/faser_reco.py ${CMAKE_CURRENT_SOURCE_DIR}/../rawdata/Faser-Physics-001920-filtered.raw TI12Data + SCRIPT scripts/faser_reco.py --geom=TI12Data ${CMAKE_CURRENT_SOURCE_DIR}/../rawdata/Faser-Physics-001920-filtered.raw PROPERTIES TIMEOUT 300 ) -# Turn this off until we figure out the CKF behavior on testbeam data atlas_add_test( ProdRecoTestBeam - SCRIPT scripts/faser_reco.py ${CMAKE_CURRENT_SOURCE_DIR}/../RAWDATA/Faser-Physics-003613-filtered.raw TestBeamData + SCRIPT scripts/faser_reco.py --geom=TestBeamData ${CMAKE_CURRENT_SOURCE_DIR}/../RAWDATA/Faser-Physics-003613-filtered.raw PROPERTIES TIMEOUT 300 ) atlas_add_test( ProdRecoPilotTracks - SCRIPT scripts/faser_reco.py ${CMAKE_CURRENT_SOURCE_DIR}/../RAWDATA/Faser-Physics-pilot_tracks-filtered.raw TI12Data + SCRIPT scripts/faser_reco.py --geom=TI12Data ${CMAKE_CURRENT_SOURCE_DIR}/../RAWDATA/Faser-Physics-pilot_tracks-filtered.raw PROPERTIES TIMEOUT 300 ) # Test of TI12Data02 geometry (should auto-select from run number) diff --git a/Control/CalypsoExample/Reconstruction/scripts/faser_reco.py b/Control/CalypsoExample/Reconstruction/scripts/faser_reco.py index e140844b5d80b73991cd66f55a912f347c01e4d1..b4568e9051de5fcafc3d0e4d6965cca13184a9c8 100755 --- a/Control/CalypsoExample/Reconstruction/scripts/faser_reco.py +++ b/Control/CalypsoExample/Reconstruction/scripts/faser_reco.py @@ -2,16 +2,16 @@ # # Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration # Run with: -# ./faser_reco.py filepath [runtype] +# ./faser_reco.py [--geom=runtype] filepath # # filepath - fully qualified path, including url if needed, to the input raw data file # example: 
"root://hepatl30//atlas/local/torrence/faser/commissioning/TestBeamData/Run-004150/Faser-Physics-004150-00000.raw" # # runtype - optionally specify the data type (TI12Data, TI12Data02, TI12Data03 or TestBeamData). -# In a normal file system location, this will be extracted from the directory name, -# but runtype will override this assignment. -# TI12Data02 is needed for the IFT geometry. Script will auto-detect this if read -# from normal file system location. +# +# Options: +# --isMC - needed to reconstruct MC data +# --testBeam - shortcut to specify testbeam geometry # import sys import time @@ -23,8 +23,8 @@ parser = argparse.ArgumentParser(description="Run FASER reconstruction") parser.add_argument("file_path", help="Fully qualified path of the raw input file") -parser.add_argument("run_type", nargs="?", default="", - help="Specify run type (if it can't be parsed from path)") +parser.add_argument("-g", "--geom", default="", + help="Specify geometry (if it can't be parsed from run number)\n Values: TI12Data03 (2022 TI12)") parser.add_argument("-r", "--reco", default="", help="Specify reco tag (to append to output filename)") parser.add_argument("-n", "--nevents", type=int, default=-1, @@ -33,7 +33,8 @@ parser.add_argument("-v", "--verbose", action='store_true', help="Turn on DEBUG output") parser.add_argument("--isMC", action='store_true', help="Running on digitised MC rather than data") - +parser.add_argument("--testBeam", action='store_true', + help="Set geometry for 2021 test beam") args = parser.parse_args() @@ -42,12 +43,14 @@ from pathlib import Path filepath=Path(args.file_path) # runtype has been provided -if len(args.run_type) > 0: - runtype=args.run_type +if len(args.geom) > 0: + runtype=args.geom + +# Shortcut for testbeam +elif args.testBeam: + print(f"Use 2021 TestBeam configuration") + runtype = "TestBeamData" -# Extract runtype from path -# Should be directory above run -# i.e.: TestBeamData/Run-004150/Faser-Physics-004150-00000.raw" else: runtype = "TI12Data03" @@ -55,9 +58,9 @@ else: # Try to pick correct geometry from run number # This won't work for testbeam data, # so lets call this a hack for now - runname = filepath.parts[-2] + runname = filepath.parts[-1] try: - runnumber = int(runname.split('-')[1]) + runnumber = int(runname.split('-')[2]) except Exception as e: print(f"Failed to find run number in {filepath}") print(f"Couldn't parse {runname}") @@ -90,7 +93,10 @@ Configurable.configurableRun3Behavior = True # Flags for this job ConfigFlags.Input.isMC = args.isMC # Needed to bypass autoconfig -ConfigFlags.IOVDb.DatabaseInstance = "OFLP200" # Use MC conditions for now +if args.isMC: + ConfigFlags.IOVDb.DatabaseInstance = "OFLP200" # Use MC conditions +else: + ConfigFlags.IOVDb.DatabaseInstance = "CONDBR3" # Use data conditions ConfigFlags.Input.ProjectName = "data20" ConfigFlags.GeoModel.Align.Dynamic = False @@ -105,7 +111,7 @@ if runtype == "TI12Data": ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-01" # Testbeam setup -elif runtype == "TestBeamData" or runtype == "TestBeam2021": +elif runtype == "TestBeamData" or runtype == "TestBeamMC": ConfigFlags.GeoModel.FaserVersion = "FASER-TB00" ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-TB00" useCKF = False @@ -123,13 +129,17 @@ elif runtype == "TI12Data03": else: print("Invalid run type found:", runtype) print("Specify correct type or update list") - sys.exit(-1) + sys.exit(1) # Must use original input string here, as pathlib mangles double // in path names ConfigFlags.Input.Files = [ args.file_path ] filestem = 
filepath.stem +# Remove any filetype modifier +if filestem[-4:] == "-RDO": + filestem = filestem[:-4] + if len(args.reco) > 0: filestem += f"-{args.reco}" @@ -218,7 +228,11 @@ itemList = [ "xAOD::EventInfo#*" ] # if args.isMC: - # Add truth records here? + # Make xAOD versions of truth + from Reconstruction.xAODTruthCnvAlgConfig import xAODTruthCnvAlgCfg + acc.merge(xAODTruthCnvAlgCfg(ConfigFlags)) + + # Add MC information here itemList.extend( ["McEventCollection#*", "TrackerSimDataCollection#*"] ) acc.merge(OutputStreamCfg(ConfigFlags, "xAOD", itemList)) @@ -244,9 +258,9 @@ if not args.isMC: replicaSvc.UseGeomSQLite = True # Configure verbosity -ConfigFlags.dump() if args.verbose: acc.foreach_component("*").OutputLevel = VERBOSE + ConfigFlags.dump() else: acc.foreach_component("*").OutputLevel = INFO diff --git a/Control/CalypsoExample/Reconstruction/scripts/submit_faserMDC_reco.sh b/Control/CalypsoExample/Reconstruction/scripts/submit_faserMDC_reco.sh index 819cfeb49014f33f1bde93fdd53b22c2b075e795..5dacef1cf4152e0a1cd817d7e52e9c38c65a1fb9 100755 --- a/Control/CalypsoExample/Reconstruction/scripts/submit_faserMDC_reco.sh +++ b/Control/CalypsoExample/Reconstruction/scripts/submit_faserMDC_reco.sh @@ -175,7 +175,8 @@ then ls -l echo "copy *-RDO.root to $outdest" mkdir -p $outdest - eos cp *-xAOD.root $outdest + # Keep this line from stopping script, so we might get a log file + eos cp *-xAOD.root ${outdest}/ || true fi # # Also copy log file @@ -185,12 +186,12 @@ then ls -l echo "copy $logfile to $logdest" mkdir -p $logdest - eos cp $logfile $logdest + eos cp $logfile $logdest/$logfile elif ! [ -z "$outdest" ] then cd .. ls -l echo "copy $logfile to $outdest" mkdir -p $outdest - eos cp $logfile\ $outdest + eos cp $logfile $outdest/$logfile fi diff --git a/Control/CalypsoExample/Reconstruction/scripts/submit_faser_reco.sh b/Control/CalypsoExample/Reconstruction/scripts/submit_faser_reco.sh index 81db602a2c7be2e9430b6d3918a13f3d20d12e10..6924506133cddacd3ad7f4f88aec64121719b9b8 100755 --- a/Control/CalypsoExample/Reconstruction/scripts/submit_faser_reco.sh +++ b/Control/CalypsoExample/Reconstruction/scripts/submit_faser_reco.sh @@ -7,6 +7,8 @@ # Options: # --out - specify output location (in EOS) to copy output HITS file # --log - specify output location (in EOS) for log file +# --geom - specify geometry +# --isMC - needed for MC reco # # file_path - full file name (with path) # release_directory - optional path to release install directory (default pwd) @@ -37,6 +39,15 @@ do shift; shift;; + -g | --geom) + geom="$2"; + shift; + shift;; + + --isMC) + ismc=1 + shift;; + --) # End of options shift; # Eat this break;; # And stop parsing @@ -106,6 +117,8 @@ logfile="${file_stem}.rec.log" exec >& "$output_directory/$logfile" echo `date` - $HOSTNAME echo "File: $file_name" +echo "Filepath: $file_path" +echo "Geom: $geom" echo "Release: $release_directory" echo "Output: $output_directory" echo "Starting: $starting_directory" @@ -123,8 +136,8 @@ cd "$release_directory" # # Do this by hand asetup --input=calypso/asetup.faser Athena,22.0.49 -source build/x86*/setup.sh -# +source run/setup.sh +#source build/x86*/setup.sh # # Try to find a release tag cd calypso @@ -156,13 +169,27 @@ else fi cd "$file_stem" # -# Run job, || True ensures script continues even if job fails +# Run job if [[ -z "$tag" ]]; then - faser_reco.py "--nevents=$nevents" "$file_path" || True + tagstr="" else - faser_reco.py "--nevents=$nevents" "--reco=$tag" "$file_path" || True + tagstr="--reco=$tag" fi # +if [[ -z "$geom" ]]; 
then + geomstr="" +else + geomstr="--geom $geom" +fi +# +if [[ -z "$ismc" ]]; then + mcstr="" +else + mcstr="--isMC" +fi +# +faser_reco.py "--nevents=$nevents" $geomstr $tagstr $mcstr "$file_path" +# # Print out ending time date echo "Job finished after $SECONDS seconds" @@ -173,25 +200,30 @@ export EOS_MGM_URL=root://eospublic.cern.ch # Now copy output file if ! [ -z "$outdest" ] then + echo "Output directory:" ls -l - echo "copy *-RDO.root to $outdest" - mkdir -p $outdest - eos cp *-xAOD.root $outdest || True + echo "copy *-xAOD.root to $outdest" + eos mkdir -p $outdest + # Keep this line from stopping script, so we might get a log file + # || true ensures script continues even if copy fails + eos cp *-xAOD.root ${outdest}/ || true fi # # Copy log file second if ! [ -z "$logdest" ] then cd .. + echo "Working directory:" ls -l echo "copy $logfile to $logdest" - mkdir -p $logdest - eos cp $logfile $logdest + eos mkdir -p $logdest + eos cp $logfile $logdest/$logfile elif ! [ -z "$outdest" ] then cd .. + echo "Working directory:" ls -l echo "copy $logfile to $outdest" - mkdir -p $outdest - eos cp $logfile\ $outdest + eos mkdir -p $outdest + eos cp $logfile $outdest/$logfile fi diff --git a/Control/CalypsoExample/Simulation/scripts/faserMDC_simulate.py b/Control/CalypsoExample/Simulation/scripts/faserMDC_simulate.py index 12de492f4070237b59dda273673b994bbc275d37..0fd9622db5de8630ffaef34bd2ae4b6eabe88a5c 100755 --- a/Control/CalypsoExample/Simulation/scripts/faserMDC_simulate.py +++ b/Control/CalypsoExample/Simulation/scripts/faserMDC_simulate.py @@ -160,7 +160,7 @@ if __name__ == '__main__': # # MDC geometry configuration # - detectors = ['Veto', 'VetoNu', 'Preshower', 'FaserSCT', 'Ecal', 'Trigger', 'Dipole', 'Emulsion'] + detectors = ['Veto', 'VetoNu', 'Preshower', 'FaserSCT', 'Ecal', 'Trigger', 'Dipole', 'Emulsion', 'Trench'] # # Setup detector flags # diff --git a/Control/CalypsoExample/Simulation/scripts/submit_faserMDC_simulate.sh b/Control/CalypsoExample/Simulation/scripts/submit_faserMDC_simulate.sh index bedbb59174d22cb1e263d79be03670d8bba097a1..03b8a836e5c07c336165f5e30a577aafc9aab76b 100755 --- a/Control/CalypsoExample/Simulation/scripts/submit_faserMDC_simulate.sh +++ b/Control/CalypsoExample/Simulation/scripts/submit_faserMDC_simulate.sh @@ -199,7 +199,7 @@ then ls -l echo "copy *-HITS.root to $outdest" mkdir -p $outdest - eos cp *-HITS.root $outdest + eos cp *-HITS.root ${outdest}/ || true fi # # Also copy log file @@ -209,13 +209,13 @@ then ls -l echo "copy $logfile to $logdest" mkdir -p $logdest - eos cp $logfile $logdest + eos cp $logfile $logdest/$logfile elif ! [ -z "$outdest" ] then cd .. 
ls -l echo "copy $logfile to $outdest" mkdir -p $outdest - eos cp $logfile $outdest + eos cp $logfile $outdest/$logfile fi diff --git a/Database/ConnectionManagement/FaserAuthentication/data/dblookup.xml b/Database/ConnectionManagement/FaserAuthentication/data/dblookup.xml index 04566dcf0e64d66ef8ce021b59693287156b0b6b..6238757194d2117e5384711f9751eaa6fee1495a 100644 --- a/Database/ConnectionManagement/FaserAuthentication/data/dblookup.xml +++ b/Database/ConnectionManagement/FaserAuthentication/data/dblookup.xml @@ -3,32 +3,37 @@ <logicalservice name="FASERDD"> <service name="sqlite_file:data/geomDB/geomDB_sqlite" accessMode="read" /> - <service name="sqlite_file:///cvmfs/faser.cern.ch/repo/sw/database/DBRelease/1.0.9/geomDB/geomDB_sqlite" accessMode="read" /> + <service name="sqlite_file:///cvmfs/faser.cern.ch/repo/sw/database/DBRelease/1.1.0/geomDB/geomDB_sqlite" accessMode="read" /> </logicalservice> <logicalservice name="COOLOFL_SCT"> <service name="sqlite_file:data/sqlite200/ALLP200.db" accessMode="read" /> - <service name="sqlite_file:///cvmfs/faser.cern.ch/repo/sw/database/DBRelease/1.0.9/sqlite200/ALLP200.db" accessMode="read" /> + <service name="sqlite_file:///cvmfs/faser.cern.ch/repo/sw/database/DBRelease/1.1.0/sqlite200/ALLP200.db" accessMode="read" /> </logicalservice> <logicalservice name="COOLOFL_DCS"> <service name="sqlite_file:data/sqlite200/ALLP200.db" accessMode="read" /> - <service name="sqlite_file:///cvmfs/faser.cern.ch/repo/sw/database/DBRelease/1.0.9/sqlite200/ALLP200.db" accessMode="read" /> + <service name="sqlite_file:///cvmfs/faser.cern.ch/repo/sw/database/DBRelease/1.1.0/sqlite200/ALLP200.db" accessMode="read" /> </logicalservice> <logicalservice name="COOLOFL_GLOBAL"> <service name="sqlite_file:data/sqlite200/ALLP200.db" accessMode="read" /> - <service name="sqlite_file:///cvmfs/faser.cern.ch/repo/sw/database/DBRelease/1.0.9/sqlite200/ALLP200.db" accessMode="read" /> + <service name="sqlite_file:///cvmfs/faser.cern.ch/repo/sw/database/DBRelease/1.1.0/sqlite200/ALLP200.db" accessMode="read" /> </logicalservice> <logicalservice name="COOLOFL_TDAQ"> <service name="sqlite_file:data/sqlite200/CABP200.db" accessMode="read" /> - <service name="sqlite_file:///cvmfs/faser.cern.ch/repo/sw/database/DBRelease/1.0.9/sqlite200/CABP200.db" accessMode="read" /> + <service name="sqlite_file:///cvmfs/faser.cern.ch/repo/sw/database/DBRelease/1.1.0/sqlite200/CABP200.db" accessMode="read" /> </logicalservice> <logicalservice name="COOLOFL_TRIGGER"> - <service name="sqlite_file:data/sqlite200/waveform_reco.db" accessMode="read" /> - <service name="sqlite_file:///cvmfs/faser.cern.ch/repo/sw/database/DBRelease/1.0.9/sqlite200/waveform_reco.db" accessMode="read" /> + <service name="sqlite_file:data/sqlite200/ALLP200.db" accessMode="read" /> + <service name="sqlite_file:///cvmfs/faser.cern.ch/repo/sw/database/DBRelease/1.1.0/sqlite200/ALLP200.db" accessMode="read" /> </logicalservice> + <logicalservice name="COOLOFL_INDET"> + <service name="sqlite_file:data/sqlite200/noisy_strips.db" accessMode="read" /> + <service name="sqlite_file:///cvmfs/faser.cern.ch/repo/sw/database/DBRelease/1.1.0/sqlite200/ALLP200.db" accessMode="read" /> + </logicalservice> + </servicelist> diff --git a/DetectorDescription/GeoModel/FaserGeoModel/python/GeoModelInit.py b/DetectorDescription/GeoModel/FaserGeoModel/python/GeoModelInit.py index af1ed16dcb630fd08948f6a648d9deb6d46756e2..d17c8e39d932df7be5a7265d7b4c2403a0546430 100644 --- a/DetectorDescription/GeoModel/FaserGeoModel/python/GeoModelInit.py +++ 
b/DetectorDescription/GeoModel/FaserGeoModel/python/GeoModelInit.py @@ -95,7 +95,9 @@ def _setupGeoModel(): # Deal with SCT alignment conditions folders and algorithms - conddb.addFolderSplitOnline("SCT","/Tracker/Onl/Align","/Tracker/Align",className="AlignableTransformContainer") + #conddb.addFolderSplitOnline("SCT","/Tracker/Onl/Align","/Tracker/Align",className="AlignableTransformContainer") + print("Override Alignment dbname to OFLP200, fix this when alignment available in CONDBR3") + conddb.addFolder("/Tracker/Align", "SCT_OFL",className="AlignableTransformContainer",db="OFLP200") from AthenaCommon.AlgSequence import AthSequencer condSeq = AthSequencer("AthCondSeq") if not hasattr(condSeq, "FaserSCT_AlignCondAlg"): diff --git a/Scintillator/ScintDigiAlgs/python/ScintDigiAlgsConfig.py b/Scintillator/ScintDigiAlgs/python/ScintDigiAlgsConfig.py index 7436ada49b6448e4d1b12309c8faf8396fae9a1d..407ab913201a42845581e5543e1455b7b89c5867 100644 --- a/Scintillator/ScintDigiAlgs/python/ScintDigiAlgsConfig.py +++ b/Scintillator/ScintDigiAlgs/python/ScintDigiAlgsConfig.py @@ -36,11 +36,15 @@ def ScintWaveformDigitizationCfg(flags): if not flags.Input.isMC: return acc - if "TB" not in flags.GeoModel.FaserVersion: - acc.merge(ScintWaveformDigiCfg(flags, "TimingWaveformDigiAlg", "Trigger")) - acc.merge(ScintWaveformDigiCfg(flags, "VetoWaveformDigiAlg", "Veto")) - acc.merge(ScintWaveformDigiCfg(flags, "VetoNuWaveformDigiAlg", "VetoNu")) - acc.merge(ScintWaveformDigiCfg(flags, "PreshowerWaveformDigiAlg", "Preshower")) + if "TB" in flags.GeoModel.FaserVersion: + acc.merge(ScintWaveformDigiCfg(flags, "VetoWaveformDigiAlg", "Veto")) + acc.merge(ScintWaveformDigiCfg(flags, "PreshowerWaveformDigiAlg", "Preshower")) + else: + acc.merge(ScintWaveformDigiCfg(flags, "TriggerWaveformDigiAlg", "Trigger")) + acc.merge(ScintWaveformDigiCfg(flags, "VetoWaveformDigiAlg", "Veto")) + acc.merge(ScintWaveformDigiCfg(flags, "VetoNuWaveformDigiAlg", "VetoNu")) + acc.merge(ScintWaveformDigiCfg(flags, "PreshowerWaveformDigiAlg", "Preshower")) + acc.merge(ScintWaveformDigitizationOutputCfg(flags)) acc.merge(WaveformCableMappingCfg(flags)) return acc @@ -57,6 +61,11 @@ def ScintWaveformDigiCfg(flags, name="ScintWaveformDigiAlg", source="", **kwargs kwargs.setdefault("WaveformContainerKey", source+"Waveforms") digiAlg = CompFactory.ScintWaveformDigiAlg(name, **kwargs) + + if "TB" in flags.GeoModel.FaserVersion and source == "Veto": + # The testbeam counters were actually VetoNu, so use those parameters + source = "VetoNu" + digiAlg.CB_alpha = dict_CB_param[source]["CB_alpha"] digiAlg.CB_n = dict_CB_param[source]["CB_n"] digiAlg.CB_mean = dict_CB_param[source]["CB_mean"] diff --git a/Tracker/TrackerConditions/FaserSCT_ConditionsAlgorithms/CMakeLists.txt b/Tracker/TrackerConditions/FaserSCT_ConditionsAlgorithms/CMakeLists.txt index c2a3d1b2056e307e861b27ea0bef0411f754db7b..af29f45685a723d97b25c290c39ae2d52fe69e91 100644 --- a/Tracker/TrackerConditions/FaserSCT_ConditionsAlgorithms/CMakeLists.txt +++ b/Tracker/TrackerConditions/FaserSCT_ConditionsAlgorithms/CMakeLists.txt @@ -10,6 +10,7 @@ find_package( Boost COMPONENTS filesystem thread system ) # Component(s) in the package: atlas_add_component( FaserSCT_ConditionsAlgorithms + src/*.h src/*.cxx src/components/*.cxx INCLUDE_DIRS ${Boost_INCLUDE_DIRS} diff --git a/Tracker/TrackerConditions/FaserSCT_ConditionsAlgorithms/src/FaserSCT_ConfigurationCondAlg.cxx b/Tracker/TrackerConditions/FaserSCT_ConditionsAlgorithms/src/FaserSCT_ConfigurationCondAlg.cxx new file mode 100644 index 
0000000000000000000000000000000000000000..113b68c4964e09c7e567e24a1bb297844fcdac82 --- /dev/null +++ b/Tracker/TrackerConditions/FaserSCT_ConditionsAlgorithms/src/FaserSCT_ConfigurationCondAlg.cxx @@ -0,0 +1,126 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +#include "FaserSCT_ConfigurationCondAlg.h" + +#include "Identifier/Identifier.h" +#include "Identifier/IdentifierHash.h" +#include "TrackerIdentifier/FaserSCT_ID.h" +#include "TrackerReadoutGeometry/SiDetectorElement.h" + +#include <memory> + + +FaserSCT_ConfigurationCondAlg::FaserSCT_ConfigurationCondAlg(const std::string& name, ISvcLocator* pSvcLocator) : + ::AthReentrantAlgorithm(name, pSvcLocator) {} + +StatusCode FaserSCT_ConfigurationCondAlg::initialize() { + ATH_MSG_DEBUG("initialize " << name()); + + // CondSvc + ATH_CHECK(m_condSvc.retrieve()); + + ATH_CHECK(detStore()->retrieve(m_idHelper, "FaserSCT_ID")); + + // Read Cond Handle + ATH_CHECK(m_readKey.initialize()); + + // Write Cond Handle + ATH_CHECK(m_writeKey.initialize()); + if (m_condSvc->regHandle(this, m_writeKey).isFailure()) { + ATH_MSG_FATAL("unable to register WriteCondHandle " << m_writeKey.fullKey() << " with CondSvc"); + return StatusCode::FAILURE; + } + + return StatusCode::SUCCESS; +} + +StatusCode FaserSCT_ConfigurationCondAlg::execute(const EventContext& ctx) const { + ATH_MSG_DEBUG("execute " << name()); + + // Write Cond Handle + SG::WriteCondHandle<FaserSCT_ConfigurationCondData> writeHandle{m_writeKey, ctx}; + // Do we have a valid Write Cond Handle for current time? + if (writeHandle.isValid()) { + ATH_MSG_DEBUG("CondHandle " << writeHandle.fullKey() << " is already valid. " + << "In theory this should not be called, but may happen" + << " if multiple concurrent events are being processed out of order."); + return StatusCode::SUCCESS; + } + + // Construct the output Cond Object and fill it in + std::unique_ptr<FaserSCT_ConfigurationCondData> writeCdo{std::make_unique<FaserSCT_ConfigurationCondData>()}; + // clear structures before filling + writeCdo->clear(); + + if (writeCdo == nullptr) { + ATH_MSG_FATAL("Pointer of derived conditions object is null"); + return StatusCode::FAILURE; + } + + // Get read handle + SG::ReadCondHandle<CondAttrListCollection> readHandle{m_readKey, ctx}; + const CondAttrListCollection* readCdo {*readHandle}; + if (readCdo==nullptr) { + ATH_MSG_FATAL("Null pointer to the read conditions object"); + return StatusCode::FAILURE; + } + ATH_MSG_INFO("Size of " << m_readKey.key() << " is " << readCdo->size()); + + // Get the validity range + EventIDRange rangeW; + if (not readHandle.range(rangeW)) { + ATH_MSG_FATAL("Failed to retrieve validity range for " << readHandle.key()); + return StatusCode::FAILURE; + } + ATH_MSG_DEBUG("Size of CondAttrListCollection " << readHandle.fullKey() << " readCdo->size()= " << readCdo->size()); + ATH_MSG_DEBUG("Range of input is " << rangeW); + + writeHandle.addDependency(readHandle); + + std::string sensorParam{"sensor"}; + std::string stripParam{"strip"}; + std::string occupancyParam{"occupancy"}; + CondAttrListCollection::const_iterator itr{readCdo->begin()}; + CondAttrListCollection::const_iterator end{readCdo->end()}; + // CondAttrListCollection doesn't support C++11 type loops, no generic 'begin' + for (; itr != end; ++itr) { + // A CondAttrListCollection is a map of ChanNum and AttributeList + CondAttrListCollection::ChanNum channelNumber{itr->first}; + const CondAttrListCollection::AttributeList &payload{itr->second}; + if (payload.exists(sensorParam) 
and not payload[sensorParam].isNull() and + payload.exists(stripParam) and not payload[stripParam].isNull() and + payload.exists(occupancyParam) and not payload[occupancyParam].isNull()) + { + auto sensorVal {payload[sensorParam].data<int>()}; + auto stripVal {payload[stripParam].data<int>()}; + float occupancyVal {payload[occupancyParam].data<float>()}; + Identifier waferId = m_idHelper->wafer_id(sensorVal); + Identifier stripId = m_idHelper->strip_id(waferId, stripVal); + if (stripId.is_valid() and occupancyVal > m_occupancyThreshold) + writeCdo->setBadStripId(stripId, sensorVal, stripVal); + ATH_MSG_VERBOSE(m_idHelper->station(stripId) << "/" << m_idHelper->layer(stripId) << "/" + << m_idHelper->phi_module(stripId) << "/" << m_idHelper->phi_module(stripId) << "/" + << m_idHelper->side(stripId) << "/" << m_idHelper->strip(stripId) << " : " + << occupancyVal); + } else { + ATH_MSG_WARNING(sensorParam << " and/or " << stripParam << " and/or " << occupancyParam << " does not exist."); + } + } + + if (writeHandle.record(std::move(writeCdo)).isFailure()) { + ATH_MSG_FATAL("Could not record SCT_ConfigurationCondData " << writeHandle.key() + << " with EventRange " << writeHandle.getRange() + << " into Conditions Store"); + return StatusCode::FAILURE; + } + ATH_MSG_INFO("recorded new CDO " << writeHandle.key() << " with range " << writeHandle.getRange() << " into Conditions Store"); + + return StatusCode::SUCCESS; +} + +StatusCode FaserSCT_ConfigurationCondAlg::finalize() { + ATH_MSG_DEBUG("finalize " << name()); + return StatusCode::SUCCESS; +} diff --git a/Tracker/TrackerConditions/FaserSCT_ConditionsAlgorithms/src/FaserSCT_ConfigurationCondAlg.h b/Tracker/TrackerConditions/FaserSCT_ConditionsAlgorithms/src/FaserSCT_ConfigurationCondAlg.h new file mode 100644 index 0000000000000000000000000000000000000000..44f81c4282b0a50d1859f31396a8d4dd8a8f335e --- /dev/null +++ b/Tracker/TrackerConditions/FaserSCT_ConditionsAlgorithms/src/FaserSCT_ConfigurationCondAlg.h @@ -0,0 +1,43 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +#ifndef FASERSCT_CONFIGURATIONCONDALG_H +#define FASERSCT_CONFIGURATIONCONDALG_H + +#include "AthenaBaseComps/AthReentrantAlgorithm.h" +#include "AthenaPoolUtilities/CondAttrListCollection.h" + +#include "StoreGate/ReadCondHandleKey.h" +#include "StoreGate/WriteCondHandleKey.h" + +#include "GaudiKernel/ICondSvc.h" +#include "GaudiKernel/ServiceHandle.h" + +// TODO use instead SCT_ConfigurationCondData? 
+// #include "SCT_ConditionsData/SCT_ConfigurationCondData.h" +#include "FaserSCT_ConditionsData/FaserSCT_ConfigurationCondData.h" + + +class FaserSCT_ID; + +class FaserSCT_ConfigurationCondAlg : public AthReentrantAlgorithm { + public: + FaserSCT_ConfigurationCondAlg(const std::string& name, ISvcLocator* pSvcLocator); + virtual ~FaserSCT_ConfigurationCondAlg() = default; + virtual StatusCode initialize() override; + virtual StatusCode execute(const EventContext& ctx) const override; + virtual StatusCode finalize() override; + virtual bool isClonable() const override { return true; }; + + private: + SG::ReadCondHandleKey<CondAttrListCollection> m_readKey {this, "ReadKey", "/SCT/DAQ/NoisyStrips", "Key of input noisy strips folder"}; + SG::WriteCondHandleKey<FaserSCT_ConfigurationCondData> m_writeKey{this, "WriteKey", "FaserSCT_ConfigurationCondData", "Key of output (derived) conditions data"}; + ServiceHandle<ICondSvc> m_condSvc{this, "CondSvc", "CondSvc"}; + const FaserSCT_ID* m_idHelper{nullptr}; + Gaudi::Property<double> m_occupancyThreshold {this, "OccupancyThreshold", 0.01, "Mask strips with an occupancy larger than the OccupancyCut"}; +}; + + + +#endif //FASERSCT_CONFIGURATIONCONDALG_H diff --git a/Tracker/TrackerConditions/FaserSCT_ConditionsAlgorithms/src/components/FaserSCT_ConditionsAlgorithms_entries.cxx b/Tracker/TrackerConditions/FaserSCT_ConditionsAlgorithms/src/components/FaserSCT_ConditionsAlgorithms_entries.cxx index f8b127e85d0a477d87019556c48cc0873a19eee8..465ffd956838ce71cb17495156804176be756ebc 100644 --- a/Tracker/TrackerConditions/FaserSCT_ConditionsAlgorithms/src/components/FaserSCT_ConditionsAlgorithms_entries.cxx +++ b/Tracker/TrackerConditions/FaserSCT_ConditionsAlgorithms/src/components/FaserSCT_ConditionsAlgorithms_entries.cxx @@ -34,6 +34,7 @@ // #include "../SCT_StripVetoTestAlg.h" // #include "../SCT_TdaqEnabledCondAlg.h" // #include "../SCT_TdaqEnabledTestAlg.h" +#include "../FaserSCT_ConfigurationCondAlg.h" DECLARE_COMPONENT( FaserSCT_AlignCondAlg ) // DECLARE_COMPONENT( SCT_ByteStreamErrorsTestAlg ) @@ -71,3 +72,4 @@ DECLARE_COMPONENT( FaserSCT_SiliconTempCondAlg ) // DECLARE_COMPONENT( SCT_StripVetoTestAlg ) // DECLARE_COMPONENT( SCT_TdaqEnabledCondAlg ) // DECLARE_COMPONENT( SCT_TdaqEnabledTestAlg ) +DECLARE_COMPONENT( FaserSCT_ConfigurationCondAlg ) diff --git a/Tracker/TrackerConditions/FaserSCT_ConditionsData/CMakeLists.txt b/Tracker/TrackerConditions/FaserSCT_ConditionsData/CMakeLists.txt index f13ae3272499c4b66d5dab008bff2a1b92081f88..16fcf75c2d6c9caff8ea4c3a18e143f6d7010436 100644 --- a/Tracker/TrackerConditions/FaserSCT_ConditionsData/CMakeLists.txt +++ b/Tracker/TrackerConditions/FaserSCT_ConditionsData/CMakeLists.txt @@ -8,6 +8,7 @@ atlas_subdir( FaserSCT_ConditionsData ) # Component(s) in the package: atlas_add_library( FaserSCT_ConditionsData + FaserSCT_ConditionsData/*.h src/*.cxx PUBLIC_HEADERS FaserSCT_ConditionsData LINK_LIBRARIES AthenaPoolUtilities Identifier ) diff --git a/Tracker/TrackerConditions/FaserSCT_ConditionsData/FaserSCT_ConditionsData/FaserSCT_ConfigurationCondData.h b/Tracker/TrackerConditions/FaserSCT_ConditionsData/FaserSCT_ConditionsData/FaserSCT_ConfigurationCondData.h new file mode 100644 index 0000000000000000000000000000000000000000..0ff7a294584d2cf7d318cfce8ab39a044dd7da5c --- /dev/null +++ b/Tracker/TrackerConditions/FaserSCT_ConditionsData/FaserSCT_ConditionsData/FaserSCT_ConfigurationCondData.h @@ -0,0 +1,98 @@ +#ifndef FASERSCT_CONFIGURATIONCONDDATA_H +#define FASERSCT_CONFIGURATIONCONDDATA_H + +/* + Copyright 
(C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +#include <array> +#include <bitset> +#include <map> +#include <set> + +// Include Athena stuff +#include "Identifier/Identifier.h" +#include "Identifier/IdentifierHash.h" + +/** + * @class FaserSCT_ConfigurationCondData + * @brief Class for data object used in SCT_ConfigurationCondAlg and SCT_ConfigurationConditionsTool. + **/ +class FaserSCT_ConfigurationCondData { +public: + + /// Constructor + FaserSCT_ConfigurationCondData(); + + /// Destructor + virtual ~FaserSCT_ConfigurationCondData() = default; + + /// Set a bad strip identifier + void setBadStripId(const Identifier& badStripId, const IdentifierHash& hash, const int strip); + /// Get all bad strip identifiers + const std::set<Identifier>* getBadStripIds() const; + /// Clear all bad strip identifiers + void clearBadStripIds(); + /// Check if a strip identifier is bad one + bool isBadStrip(const IdentifierHash& hash, const int strip) const; + + /// Set a bad wafer identifier + void setBadWaferId(const Identifier& badWaferId); + /// Get all bad wafer identifiers + const std::set<Identifier>* getBadWaferIds() const; + /// Clear all bad wafer identifiers + void clearBadWaferIds(); + /// Check if a wafer identifier is bad one + bool isBadWaferId(const Identifier& waferId) const; + + /// Set a bad module identifier + void setBadModuleId(const Identifier& badModuleId); + /// Get all bad module identifiers + const std::set<Identifier>* getBadModuleIds() const; + /// Clear all bad module identifiers + void clearBadModuleIds(); + /// Check if a module identifier is bad one + bool isBadModuleId(const Identifier& moduleId) const; + + /// Set bad links for a module + void setBadLinks(const IdentifierHash& hash, const bool isBadLink0, const bool isBadLink1); + /// Get all bad links + const std::map<IdentifierHash, std::pair<bool, bool>>* getBadLinks() const; + /// Clear all bad links + void clearBadLinks(); + /// Check if a module has bad links + std::pair<bool, bool> areBadLinks(const IdentifierHash& hash) const; + + /// Set bad chips for a module + void setBadChips(const Identifier& moduleId, const unsigned int chipStatus); + /// Get bad chips for a module + unsigned int getBadChips(const Identifier& moduleId) const; + /// Get all bad chips + const std::map<Identifier, unsigned int>* getBadChips() const; + /// Clear all bad chips + void clearBadChips(); + + /// Clear all bad information + void clear(); + +private: + enum {N_MODULES=4088, N_STRIPS=768, N_SIDES=2}; + + std::set<Identifier> m_badStripIds; + std::array<std::bitset<N_STRIPS>, N_MODULES*N_SIDES> m_badStripArray; + std::set<Identifier> m_badWaferIds; + std::set<Identifier> m_badModuleIds; + std::map<IdentifierHash, std::pair<bool, bool>> m_badLinks; + std::array<std::pair<bool, bool>, N_MODULES> m_badLinksArray; + std::map<Identifier, unsigned int> m_badChips; +}; + +// Class definition for StoreGate +#include "AthenaKernel/CLASS_DEF.h" +CLASS_DEF( FaserSCT_ConfigurationCondData , 81691222 , 1 ) + +// Condition container definition for CondInputLoader +#include "AthenaKernel/CondCont.h" +CONDCONT_DEF( FaserSCT_ConfigurationCondData, 230890898 ); + +#endif // FASERSCT_CONFIGURATIONCONDDATA_H diff --git a/Tracker/TrackerConditions/FaserSCT_ConditionsData/data/BField_DataConditions.py b/Tracker/TrackerConditions/FaserSCT_ConditionsData/data/BField_DataConditions.py new file mode 100755 index 0000000000000000000000000000000000000000..94846ee80dedb41dbc2354a6662873203aad9cb6 --- /dev/null +++ 
b/Tracker/TrackerConditions/FaserSCT_ConditionsData/data/BField_DataConditions.py @@ -0,0 +1,85 @@ +#!/bin/env python + +# Use this to add a field map to the CONDBR3 database for real data +# Copied the result from the OFLP200 DB +# Note that the testbeam turns off the field by setting scale = 0 +description = '<timeStamp>run-lumi</timeStamp><addrHeader><address_header clid="1238547719" service_type="71" /></addrHeader><typeName>CondAttrListCollection</typeName>' + +descriptionDCS = '<timeStamp>time</timeStamp><addrHeader><address_header service_type="71" clid="1238547719" /></addrHeader><typeName>CondAttrListCollection</typeName><cache>600</cache>' + +descriptionAlign = '<timeStamp>run-lumi</timeStamp><addrHeader><address_header service_type="256" clid="1170039409" /></addrHeader><typeName>AlignableTransformContainer</typeName>' + +import sys +from PyCool import cool, coral +from CoolConvUtilities.AtlCoolLib import indirectOpen + +dbSvc = cool.DatabaseSvcFactory.databaseService() +connectString = 'sqlite://;schema=ALLP200.db;dbname=CONDBR3' + +print('generating field database') +#dbSvc.dropDatabase( connectString ) +try: + # Open existing instead? + print('Try indirectOpen') + db = indirectOpen( connectString, readOnly=False ) +except Exception as e: + print(e) + print('Problem opening DB, create instead') + db = dbSvc.createDatabase( connectString ) + +glob = db.createFolderSet("/GLOBAL") +glob_bfield = db.createFolderSet("/GLOBAL/BField") + +glob_bfield.createTagRelation("GLOBAL-01", "GLOBAL-BField-01") +glob.createTagRelation("OFLCOND-FASER-01", "GLOBAL-01") + +glob_bfield.createTagRelation("GLOBAL-02", "GLOBAL-BField-02") +glob.createTagRelation("OFLCOND-FASER-02", "GLOBAL-02") + +glob_bfield.createTagRelation("GLOBAL-TB00", "GLOBAL-BField-TB00") +glob.createTagRelation("OFLCOND-FASER-TB00", "GLOBAL-TB00") + +mapSpec = cool.RecordSpecification() +mapSpec.extend( 'FieldType', cool.StorageType.String4k ) +mapSpec.extend( 'MapFileName', cool.StorageType.String4k ) + +mapRecord = cool.Record(mapSpec) +mapRecord['FieldType'] = "GlobalMap" +mapRecord['MapFileName'] = "file:MagneticFieldMaps/FaserFieldTable.root" + +mapFolderSpec = cool.FolderSpecification(cool.FolderVersioning.MULTI_VERSION, mapSpec) +mapFolder = db.createFolder('/GLOBAL/BField/Maps', mapFolderSpec, descriptionDCS, True ) + +mapFolder.storeObject( cool.ValidityKeyMin, cool.ValidityKeyMax, mapRecord, 1, "GLOBAL-BField-Maps-01", True ) +mapFolder.createTagRelation("GLOBAL-BField-01", "GLOBAL-BField-Maps-01") + +mapFolder.storeObject( cool.ValidityKeyMin, cool.ValidityKeyMax, mapRecord, 1, "GLOBAL-BField-Maps-02", True ) +mapFolder.createTagRelation("GLOBAL-BField-02", "GLOBAL-BField-Maps-02") + +mapFolder.storeObject( cool.ValidityKeyMin, cool.ValidityKeyMax, mapRecord, 1, "GLOBAL-BField-Maps-TB00", True ) +mapFolder.createTagRelation("GLOBAL-BField-TB00", "GLOBAL-BField-Maps-TB00") + +scaleSpec = cool.RecordSpecification() +scaleSpec.extend( 'value', cool.StorageType.Float ) + +scaleRecord = cool.Record(scaleSpec) +scaleRecord['value'] = 1.0 + +scaleFolderSpec = cool.FolderSpecification(cool.FolderVersioning.MULTI_VERSION, scaleSpec) +scaleFolder = db.createFolder('/GLOBAL/BField/Scales', scaleFolderSpec, descriptionDCS, True ) + +# Channel names don't seem to be handled properly by Athena +scaleFolder.createChannel( 1, "Dipole_Scale" ) +scaleFolder.storeObject( cool.ValidityKeyMin, cool.ValidityKeyMax, scaleRecord, 1, "GLOBAL-BField-Scale-01", True ) +scaleFolder.createTagRelation("GLOBAL-BField-01", 
"GLOBAL-BField-Scale-01") + + +scaleFolder.storeObject( cool.ValidityKeyMin, cool.ValidityKeyMax, scaleRecord, 1, "GLOBAL-BField-Scale-02", True ) +scaleFolder.createTagRelation("GLOBAL-BField-02", "GLOBAL-BField-Scale-02") + + +scaleRecord['value'] = 0.0 +scaleFolder.storeObject( cool.ValidityKeyMin, cool.ValidityKeyMax, scaleRecord, 1, "GLOBAL-BField-Scale-TB00", True ) +scaleFolder.createTagRelation("GLOBAL-BField-TB00", "GLOBAL-BField-Scale-TB00") + +db.closeDatabase() diff --git a/Tracker/TrackerConditions/FaserSCT_ConditionsData/src/FaserSCT_ConfigurationCondData.cxx b/Tracker/TrackerConditions/FaserSCT_ConditionsData/src/FaserSCT_ConfigurationCondData.cxx new file mode 100644 index 0000000000000000000000000000000000000000..c630fc082a6ff16a7f696696cdc3d58abd689578 --- /dev/null +++ b/Tracker/TrackerConditions/FaserSCT_ConditionsData/src/FaserSCT_ConfigurationCondData.cxx @@ -0,0 +1,165 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + + +#include "FaserSCT_ConditionsData/FaserSCT_ConfigurationCondData.h" + +#include <algorithm> +#include <iterator> + +//---------------------------------------------------------------------- +// Constructor +FaserSCT_ConfigurationCondData::FaserSCT_ConfigurationCondData(): + m_badStripIds{}, + m_badStripArray{}, + m_badWaferIds{}, + m_badModuleIds{}, + m_badChips{} +{ + clearBadLinks(); +} + +//---------------------------------------------------------------------- +// Set a bad strip identifier +void FaserSCT_ConfigurationCondData::setBadStripId(const Identifier& badStripId, const IdentifierHash& hash, const int strip) { + m_badStripIds.insert(badStripId); + m_badStripArray[hash].set(strip); +} + +//---------------------------------------------------------------------- +// Get all bad strip identifiers +const std::set<Identifier>* FaserSCT_ConfigurationCondData::getBadStripIds() const { + return &m_badStripIds; +} + +//---------------------------------------------------------------------- +// Clear all bad strip identifiers +void FaserSCT_ConfigurationCondData::clearBadStripIds() { + m_badStripIds.clear(); + m_badStripArray.fill(std::bitset<N_STRIPS>()); +} + +//---------------------------------------------------------------------- +// Check if a strip identifier is bad one +bool FaserSCT_ConfigurationCondData::isBadStrip(const IdentifierHash& hash, const int strip) const { + return m_badStripArray[hash][strip]; +} + +//---------------------------------------------------------------------- +// Set a bad wafer identifier +void FaserSCT_ConfigurationCondData::setBadWaferId(const Identifier& badWaferId) { + m_badWaferIds.insert(badWaferId); +} + +//---------------------------------------------------------------------- +// Get all bad wafer identifiers +const std::set<Identifier>* FaserSCT_ConfigurationCondData::getBadWaferIds() const { + return &m_badWaferIds; +} + +//---------------------------------------------------------------------- +// Clear all bad wafer identifiers +void FaserSCT_ConfigurationCondData::clearBadWaferIds() { + m_badWaferIds.clear(); +} + +//---------------------------------------------------------------------- +// Check if a wafer identifier is bad one +bool FaserSCT_ConfigurationCondData::isBadWaferId(const Identifier& waferId) const { + return (m_badWaferIds.find(waferId)!=m_badWaferIds.end()); +} + +//---------------------------------------------------------------------- +// Set a bad module identifier +void FaserSCT_ConfigurationCondData::setBadModuleId(const Identifier& badModuleId) { + 
m_badModuleIds.insert(badModuleId); +} + +//---------------------------------------------------------------------- +// Get all bad module identifiers +const std::set<Identifier>* FaserSCT_ConfigurationCondData::getBadModuleIds() const { + return &m_badModuleIds; +} + +//---------------------------------------------------------------------- +// Clear all bad module identifiers +void FaserSCT_ConfigurationCondData::clearBadModuleIds() { + m_badModuleIds.clear(); +} + +//---------------------------------------------------------------------- +// Check if a module identifier is bad one +bool FaserSCT_ConfigurationCondData::isBadModuleId(const Identifier& moduleId) const { + return (m_badModuleIds.find(moduleId)!=m_badModuleIds.end()); +} + +//---------------------------------------------------------------------- +// Set bad links for a module +void FaserSCT_ConfigurationCondData::setBadLinks(const IdentifierHash& hash, const bool isBadLink0, const bool isBadLink1) { + unsigned int iHash{hash}; + iHash = (iHash/2)*2; // Make iHash even + if (m_badLinks.count(iHash)==0) { + m_badLinks.insert(std::pair<IdentifierHash, std::pair<bool, bool>>(iHash, std::pair<bool, bool>(isBadLink0, isBadLink1))); + } else { + m_badLinks[iHash].first &= isBadLink0; + m_badLinks[iHash].second &= isBadLink1; + } + m_badLinksArray[iHash/2].first &= isBadLink0; + m_badLinksArray[iHash/2].second &= isBadLink1; +} + +//---------------------------------------------------------------------- +// Get all bad links +const std::map<IdentifierHash, std::pair<bool, bool>>* FaserSCT_ConfigurationCondData::getBadLinks() const { + return &m_badLinks; +} + +//---------------------------------------------------------------------- +// Clear all bad links +void FaserSCT_ConfigurationCondData::clearBadLinks() { + m_badLinks.clear(); + m_badLinksArray.fill(std::make_pair(true, true)); +} + +//---------------------------------------------------------------------- +// Check if a module has bad links +std::pair<bool, bool> FaserSCT_ConfigurationCondData::areBadLinks(const IdentifierHash& hash) const { + // Bad convetion is used. true is for good link and false is for bad link... + return m_badLinksArray[hash/2]; +} + +//---------------------------------------------------------------------- +// Set bad chips for a module +void FaserSCT_ConfigurationCondData::setBadChips(const Identifier& moduleId, const unsigned int chipStatus) { + if (chipStatus!=0) m_badChips[moduleId] = chipStatus; +} + +//---------------------------------------------------------------------- +// bad chips for a module +unsigned int FaserSCT_ConfigurationCondData::getBadChips(const Identifier& moduleId) const { + std::map<Identifier, unsigned int>::const_iterator it{m_badChips.find(moduleId)}; + return (it!=m_badChips.end()) ? 
(*it).second : 0; +} + +//---------------------------------------------------------------------- +// Get all bad chips +const std::map<Identifier, unsigned int>* FaserSCT_ConfigurationCondData::getBadChips() const { + return &m_badChips; +} + +//---------------------------------------------------------------------- +// Clear all bad chips +void FaserSCT_ConfigurationCondData::clearBadChips() { + m_badChips.clear(); +} + +//---------------------------------------------------------------------- +// Clear all bad information +void FaserSCT_ConfigurationCondData::clear() { + clearBadStripIds(); + clearBadWaferIds(); + clearBadModuleIds(); + clearBadLinks(); + clearBadChips(); +} diff --git a/Tracker/TrackerConditions/FaserSCT_ConditionsTools/CMakeLists.txt b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/CMakeLists.txt index 1450b0a07133ff3ea8a5517244cbdb9a54b0262a..a89fc207ed29dfc53e48e3182d7dad88d3a0bfb9 100644 --- a/Tracker/TrackerConditions/FaserSCT_ConditionsTools/CMakeLists.txt +++ b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/CMakeLists.txt @@ -13,6 +13,7 @@ find_package( GMock ) # Component(s) in the package: atlas_add_component ( FaserSCT_ConditionsTools + FaserSCT_ConditionsTools/*.h src/components/*.cxx INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} ${CLHEP_INCLUDE_DIRS} LINK_LIBRARIES ${ROOT_LIBRARIES} ${CLHEP_LIBRARIES} AthenaKernel FaserSCT_ConditionsToolsLib Identifier GeoModelUtilities GeoModelFaserUtilities GaudiKernel AthenaBaseComps StoreGateLib SGtests xAODEventInfo FaserSCT_ConditionsData InDetByteStreamErrors TrackerIdentifier TrackerReadoutGeometry FaserSiPropertiesToolLib InDetConditionsSummaryService ) diff --git a/Tracker/TrackerConditions/FaserSCT_ConditionsTools/FaserSCT_ConditionsTools/ISCT_ConditionsSummaryTool.h b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/FaserSCT_ConditionsTools/ISCT_ConditionsSummaryTool.h new file mode 100644 index 0000000000000000000000000000000000000000..8fdc8ebc3ce9214db846e19651041909b6439dbf --- /dev/null +++ b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/FaserSCT_ConditionsTools/ISCT_ConditionsSummaryTool.h @@ -0,0 +1,33 @@ +/* + Copyright (C) 2002-2021 CERN for the benefit of the ATLAS collaboration +*/ + +#ifndef ISCT_CONDITIONSSUMMARYTOOL_H +#define ISCT_CONDITIONSSUMMARYTOOL_H + +#include "GaudiKernel/IInterface.h" +#include "Identifier/IdContext.h" + +#include "InDetConditionsSummaryService/InDetHierarchy.h" + +class Identifier; +class IdentifierHash; + +/** + * @class ISCT_ConditionsSummaryTool + * Interface class for service providing summary of status of a detector element +**/ + +class ISCT_ConditionsSummaryTool: virtual public IInterface, virtual public IAlgTool { +public: + virtual ~ISCT_ConditionsSummaryTool() = default; + /// Creates the InterfaceID and interfaceID() method + DeclareInterfaceID(ISCT_ConditionsSummaryTool, 1, 0); + + virtual bool isGood(const Identifier& elementId, const InDetConditions::Hierarchy h) const =0; + virtual bool isGood(const Identifier& elementId, const EventContext& ctx, const InDetConditions::Hierarchy h) const =0; + virtual bool isGood(const IdentifierHash& elementHash) const =0; + virtual bool isGood(const IdentifierHash& elementHash, const EventContext& ctx) const =0; +}; + +#endif //ISCT_CONDITIONSSUMMARYTOOL_H diff --git a/Tracker/TrackerConditions/FaserSCT_ConditionsTools/FaserSCT_ConditionsTools/ISCT_ConfigurationConditionsTool.h b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/FaserSCT_ConditionsTools/ISCT_ConfigurationConditionsTool.h index 
597047503e59fdbf179b02eb95ddc4cc4b777358..c4d4ca0eeaead8ad6cbbe72e5d7a8979d938a528 100644 --- a/Tracker/TrackerConditions/FaserSCT_ConditionsTools/FaserSCT_ConditionsTools/ISCT_ConfigurationConditionsTool.h +++ b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/FaserSCT_ConditionsTools/ISCT_ConfigurationConditionsTool.h @@ -15,7 +15,7 @@ #include <map> #include "InDetConditionsSummaryService/InDetHierarchy.h" -#include "FaserSCT_ConditionsTools/ISCT_ConditionsTool.h" +#include "ISCT_ConditionsTool.h" class Identifier; class IdentifierHash; diff --git a/Tracker/TrackerConditions/FaserSCT_ConditionsTools/FaserSCT_ConditionsTools/ISCT_NoisyStripTool.h b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/FaserSCT_ConditionsTools/ISCT_NoisyStripTool.h deleted file mode 100644 index 9fcf6b9a54f891e52edcedb729a3ff8510af319a..0000000000000000000000000000000000000000 --- a/Tracker/TrackerConditions/FaserSCT_ConditionsTools/FaserSCT_ConditionsTools/ISCT_NoisyStripTool.h +++ /dev/null @@ -1,23 +0,0 @@ -/* - Copyright (C) 2002-2022 CERN for the benefit of the ATLAS and FAsER collaborations -*/ - -#ifndef ISCT_NOISY_STRIP_TOOL -#define ISCT_NOISY_STRIP_TOOL - -#include "GaudiKernel/IAlgTool.h" -#include "GaudiKernel/EventContext.h" -#include <vector> - - -class ISCT_NoisyStripTool: virtual public IAlgTool { -public: - virtual ~ISCT_NoisyStripTool() = default; - - DeclareInterfaceID(ISCT_NoisyStripTool, 1, 0); - - virtual std::map<std::pair<int,int>, double> getNoisyStrips(const EventContext& ctx) const = 0; - virtual std::map<std::pair<int,int>, double> getNoisyStrips(void) const = 0; -}; - -#endif // ISCT_NOISY_STRIP_TOOL diff --git a/Tracker/TrackerConditions/FaserSCT_ConditionsTools/python/FaserSCT_ConditionsSummaryToolConfig.py b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/python/FaserSCT_ConditionsSummaryToolConfig.py new file mode 100644 index 0000000000000000000000000000000000000000..8f07c6531bc9023cf6959cff9e9ccc44fb9990af --- /dev/null +++ b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/python/FaserSCT_ConditionsSummaryToolConfig.py @@ -0,0 +1,34 @@ +# Copyright (C) 2002-2021 CERN for the benefit of the ATLAS collaboration +from AthenaConfiguration.ComponentAccumulator import ComponentAccumulator +from AthenaConfiguration.ComponentFactory import CompFactory +from IOVDbSvc.IOVDbSvcConfig import addFolders +from SCT_GeoModel.SCT_GeoModelConfig import SCT_ReadoutGeometryCfg + +FaserSCT_ConfigurationCondAlg = CompFactory.FaserSCT_ConfigurationCondAlg +FaserSCT_ConfigurationConditionsTool = CompFactory.FaserSCT_ConfigurationConditionsTool + +def FaserSCT_ConfigurationCondAlgCfg(flags, name="FaserSCT_ConfigurationCondAlg", **kwargs): + acc = ComponentAccumulator() + dbInstance = kwargs.get("dbInstance", "INDET_OFL") + dbFolder = kwargs.get("dbFolder", "/SCT/DAQ/NoisyStrips") + #dbFolder = kwargs.get("dbFolder", "/INDET/NoisyStrips") + acc.merge(addFolders(flags, dbFolder, dbInstance, className="CondAttrListCollection")) + acc.addCondAlgo(FaserSCT_ConfigurationCondAlg(name, **kwargs)) + return acc + + +def FaserSCT_ConfigurationConditionsToolCfg(flags, name="FaserSCT_ConfigurationCondAlg", **kwargs): + acc = ComponentAccumulator() + acc.merge(FaserSCT_ConfigurationCondAlgCfg(flags, name="FaserSCT_ConfigurationCondAlg", **kwargs)) + acc.setPrivateTools(FaserSCT_ConfigurationConditionsTool(name, **kwargs)) + return acc + + +def FaserSCT_ConditionsSummaryToolCfg(flags, name="FaserSCT_ConditionsSummaryTool", **kwargs): + acc = ComponentAccumulator() + ConditionsTools = [] + 
SCT_ConfigurationConditionsTool = acc.popToolsAndMerge(FaserSCT_ConfigurationConditionsToolCfg(flags)) + ConditionsTools += [ SCT_ConfigurationConditionsTool ] + kwargs.setdefault("ConditionsTools", ConditionsTools) + acc.setPrivateTools(CompFactory.FaserSCT_ConditionsSummaryTool(name=name, **kwargs)) + return acc diff --git a/Tracker/TrackerConditions/FaserSCT_ConditionsTools/python/FaserSCT_DCSConditionsConfig.py b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/python/FaserSCT_DCSConditionsConfig.py index cc1ddf6c99661d1fdbace11d74e7c3785e90be9e..42b71a58c0ba31f95f196a509134218e01e7e28c 100644 --- a/Tracker/TrackerConditions/FaserSCT_ConditionsTools/python/FaserSCT_DCSConditionsConfig.py +++ b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/python/FaserSCT_DCSConditionsConfig.py @@ -29,7 +29,7 @@ def FaserSCT_DCSConditionsCfg(flags, name="TrackerSCT_DCSConditions", **kwargs): tempFolder = kwargs.get("tempFolder", "/SCT/DCS/MODTEMP") stateFolder = kwargs.get("stateFolder", "/SCT/DCS/CHANSTAT") if tool.ReadAllDBFolders == tool.ReturnHVTemp: - acc.merge(addFolders(flags, stateFolder, dbInstance, className="CondAttrListCollection")) + acc.merge(addFolders(flags, stateFolder, dbInstance, className="CondAttrListCollection",db="OFLP200")) # algo statArgs = { "name": name + "StatCondAlg", @@ -40,7 +40,7 @@ def FaserSCT_DCSConditionsCfg(flags, name="TrackerSCT_DCSConditions", **kwargs): statAlg = FaserSCT_DCSConditionsStatCondAlg(**statArgs) acc.addCondAlgo(statAlg) if tool.ReturnHVTemp: - acc.merge(addFolders(flags, [hvFolder, tempFolder], dbInstance, className="CondAttrListCollection")) + acc.merge(addFolders(flags, [hvFolder, tempFolder], dbInstance, className="CondAttrListCollection",db="OFLP200")) hvAlg = FaserSCT_DCSConditionsHVCondAlg(name=name + "HVCondAlg", ReadKey=hvFolder) acc.addCondAlgo(hvAlg) tempAlg = FaserSCT_DCSConditionsTempCondAlg(name=name + "TempCondAlg", ReadKey=tempFolder) diff --git a/Tracker/TrackerConditions/FaserSCT_ConditionsTools/python/FaserSCT_NoisyStripsConfig.py b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/python/FaserSCT_NoisyStripsConfig.py new file mode 100644 index 0000000000000000000000000000000000000000..2ac8a632f87fdc8faf7046001188806dfd1a6dd0 --- /dev/null +++ b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/python/FaserSCT_NoisyStripsConfig.py @@ -0,0 +1,23 @@ +"""Define methods to configure FaserSCT_NoisyStrips + +Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration +""" +from AthenaConfiguration.ComponentAccumulator import ComponentAccumulator +from AthenaConfiguration.ComponentFactory import CompFactory +from IOVDbSvc.IOVDbSvcConfig import addFolders +FaserSCT_NoisyStripsTool=CompFactory.FaserSCT_NoisyStripTool + +def FaserSCT_NoisyStripsToolCfg(flags, name="NoisyStripsTool", **kwargs): + """Return a configured FaserSCT_NoisyStripsTool""" + return FaserSCT_NoisyStripsTool(name, **kwargs) + +def FaserSCT_NoisyStripsCfg(flags, **kwargs): + """Return configured ComponentAccumulator and tool for FaserSCT_NoisyStrips + NoisyStripsTool may be provided in kwargs + """ + acc = ComponentAccumulator() + dbInstance = kwargs.get("dbInstance", "INDET_OFL") + dbFolder = kwargs.get("dbFolder", "/SCT/DAQ/NoisyStrips") + acc.merge(addFolders(flags, dbFolder, dbInstance, className="CondAttrListCollection")) + # acc.addPublicTool(tool) + return acc diff --git a/Tracker/TrackerConditions/FaserSCT_ConditionsTools/src/FaserSCT_ConditionsSummaryTool.cxx 
b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/src/FaserSCT_ConditionsSummaryTool.cxx new file mode 100644 index 0000000000000000000000000000000000000000..afa408534cb14480c4664d5f3055af1b94e1126a --- /dev/null +++ b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/src/FaserSCT_ConditionsSummaryTool.cxx @@ -0,0 +1,49 @@ +/* + Copyright (C) 2002-2021 CERN for the benefit of the ATLAS collaboration +*/ + + +#include "FaserSCT_ConditionsSummaryTool.h" +#include "FaserSCT_ConditionsTools/ISCT_ConditionsTool.h" + + +FaserSCT_ConditionsSummaryTool::FaserSCT_ConditionsSummaryTool(const std::string& type, const std::string& name, const IInterface* parent) : + base_class(type, name, parent), m_toolHandles{this} { + declareProperty("ConditionsTools", m_toolHandles); +} + +StatusCode FaserSCT_ConditionsSummaryTool::initialize() { + ATH_CHECK(m_toolHandles.retrieve()); + m_noReports = m_toolHandles.empty(); + return StatusCode::SUCCESS; +} + +bool FaserSCT_ConditionsSummaryTool::isGood(const Identifier& elementId, const EventContext& ctx, const InDetConditions::Hierarchy h) const { + if (not m_noReports) { + for (const ToolHandle<ISCT_ConditionsTool>& tool: m_toolHandles) { + if (tool->canReportAbout(h) and (not tool->isGood(elementId, ctx, h))) return false; + } + } + return true; +} + +bool FaserSCT_ConditionsSummaryTool::isGood(const Identifier& elementId, const InDetConditions::Hierarchy h) const { + return isGood(elementId, Gaudi::Hive::currentContext(), h); +} + +bool FaserSCT_ConditionsSummaryTool::isGood(const IdentifierHash& elementHash, const EventContext& ctx) const { + if (not m_noReports) { + for (const ToolHandle<ISCT_ConditionsTool>& tool: m_toolHandles) { + if ((tool->canReportAbout(InDetConditions::SCT_SIDE) or + tool->canReportAbout(InDetConditions::SCT_MODULE)) and + (not tool->isGood(elementHash, ctx))) { + return false; + } + } + } + return true; +} + +bool FaserSCT_ConditionsSummaryTool::isGood(const IdentifierHash& elementHash) const { + return isGood(elementHash, Gaudi::Hive::currentContext()); +} diff --git a/Tracker/TrackerConditions/FaserSCT_ConditionsTools/src/FaserSCT_ConditionsSummaryTool.h b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/src/FaserSCT_ConditionsSummaryTool.h new file mode 100644 index 0000000000000000000000000000000000000000..681ac2be455a5ab243f61165c99022a169f8ee3e --- /dev/null +++ b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/src/FaserSCT_ConditionsSummaryTool.h @@ -0,0 +1,38 @@ +#ifndef FASERSCT_CONDITIONSSUMMARYTOOL_H +#define FASERSCT_CONDITIONSSUMMARYTOOL_H + +#include "AthenaBaseComps/AthAlgTool.h" +#include "InDetConditionsSummaryService/InDetHierarchy.h" +#include "FaserSCT_ConditionsTools/ISCT_ConditionsSummaryTool.h" + +#include "GaudiKernel/ToolHandle.h" +#include "GaudiKernel/EventContext.h" + +#include <string> +#include <vector> + +class ISCT_ConditionsTool; + +/** + * @class FaserSCT_ConditionsSummaryTool + * Interface class for tool providing summary of status of an SCT detector element +**/ +class FaserSCT_ConditionsSummaryTool: public extends<AthAlgTool, ISCT_ConditionsSummaryTool> { +public: + FaserSCT_ConditionsSummaryTool(const std::string& type, const std::string& name, const IInterface* parent); //!< Tool constructor + virtual ~FaserSCT_ConditionsSummaryTool() = default; + virtual StatusCode initialize() override; + + virtual bool isGood(const Identifier& elementId, const InDetConditions::Hierarchy h) const override; + virtual bool isGood(const Identifier& elementId, const EventContext& ctx, const 
InDetConditions::Hierarchy h) const override; + virtual bool isGood(const IdentifierHash& elementHash) const override; + virtual bool isGood(const IdentifierHash& elementHash, const EventContext& ctx) const override; + +private: + StringArrayProperty m_reportingTools; //!< list of tools to be used + ToolHandleArray<ISCT_ConditionsTool> m_toolHandles; + bool m_noReports{true}; +}; + + +#endif // FASERSCT_CONDITIONSSUMMARYTOOL_H diff --git a/Tracker/TrackerConditions/FaserSCT_ConditionsTools/src/FaserSCT_ConfigurationConditionsTool.cxx b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/src/FaserSCT_ConfigurationConditionsTool.cxx new file mode 100644 index 0000000000000000000000000000000000000000..cb444a12f271273cc0cc4f7679b505df713f100e --- /dev/null +++ b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/src/FaserSCT_ConfigurationConditionsTool.cxx @@ -0,0 +1,327 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +#include "FaserSCT_ConfigurationConditionsTool.h" + +// Athena includes +#include "TrackerIdentifier/FaserSCT_ID.h" +// #include "InDetReadoutGeometry/SiDetectorElement.h" +#include "StoreGate/ReadCondHandle.h" + +// Constructor +FaserSCT_ConfigurationConditionsTool::FaserSCT_ConfigurationConditionsTool(const std::string& type, const std::string& name, const IInterface* parent) : + base_class(type, name, parent) +{ +} + +// Initialize +StatusCode FaserSCT_ConfigurationConditionsTool::initialize() { + ATH_MSG_DEBUG("Initializing configuration"); + + ATH_CHECK(detStore()->retrieve(m_idHelper, "FaserSCT_ID")); + + // Read Cond Handle Key + ATH_CHECK(m_condKey.initialize()); + // ATH_CHECK(m_SCTDetEleCollKey.initialize()); + + return StatusCode::SUCCESS; +} + +// What level of element can this service report about +bool FaserSCT_ConfigurationConditionsTool::canReportAbout(InDetConditions::Hierarchy h) const { + return (h == InDetConditions::SCT_STRIP or + h == InDetConditions::SCT_CHIP or + h == InDetConditions::SCT_SIDE or + h == InDetConditions::SCT_MODULE or + h == InDetConditions::DEFAULT); +} + +// Is an element with this Identifier and hierarchy good? +bool FaserSCT_ConfigurationConditionsTool::isGood(const Identifier& elementId, const EventContext& ctx, InDetConditions::Hierarchy h) const { + if (not canReportAbout(h)) return true; + + const FaserSCT_ConfigurationCondData* condData{getCondData(ctx)}; + if (condData==nullptr) { + ATH_MSG_ERROR("In isGood, FaserSCT_ConfigurationCondData pointer cannot be retrieved"); + return false; + } + + bool result{true}; + if (h == InDetConditions::SCT_STRIP) { + result = (not condData->isBadStrip(m_idHelper->wafer_hash(m_idHelper->wafer_id(elementId)), + m_idHelper->strip(elementId))); + // If strip itself is not bad, check if it's in a bad module + if (result and m_checkStripsInsideModules) result = (not isStripInBadModule(elementId, condData)); + } else if (h == InDetConditions::SCT_MODULE) { + result = (not condData->isBadModuleId(elementId)); + } else if (h == InDetConditions::SCT_SIDE or h == InDetConditions::DEFAULT) { + result = (not condData->isBadWaferId(elementId)); + } else if (h == InDetConditions::SCT_CHIP) { + result = isGoodChip(elementId, ctx); + } + return result; +} + +bool FaserSCT_ConfigurationConditionsTool::isGood(const Identifier& elementId, InDetConditions::Hierarchy h) const { + const EventContext& ctx{Gaudi::Hive::currentContext()}; + + return isGood(elementId, ctx, h); +} + +// Is a wafer with this IdentifierHash good? 
+bool FaserSCT_ConfigurationConditionsTool::isGood(const IdentifierHash& hashId, const EventContext& ctx) const { + const Identifier elementId{m_idHelper->wafer_id(hashId)}; + return isGood(elementId, ctx, InDetConditions::SCT_SIDE); +} + +bool FaserSCT_ConfigurationConditionsTool::isGood(const IdentifierHash& hashId) const { + const EventContext& ctx{Gaudi::Hive::currentContext()}; + return isGood(hashId, ctx); +} + +// Is a chip with this Identifier good? +bool FaserSCT_ConfigurationConditionsTool::isGoodChip(const Identifier& stripId, const EventContext& ctx) const { + // This check assumes present SCT. + // Get module number + const Identifier moduleId{m_idHelper->module_id(stripId)}; + if (not moduleId.is_valid()) { + ATH_MSG_WARNING("moduleId obtained from stripId " << stripId << " is invalid."); + return false; + } + + // badChips word for the module + const unsigned int v_badChips{badChips(moduleId, ctx)}; + // badChips holds 12 bits. + // bit 0 (LSB) is chip 0 for side 0. + // bit 5 is chip 5 for side 0. + // bit 6 is chip 6 for side 1. + // bit 11 is chip 11 for side 1. + + // If there is no bad chip, this check is done. + if (v_badChips==0) return true; + + const int side{m_idHelper->side(stripId)}; + // Check the six chips on the side + // 0x3F = 0000 0011 1111 + // 0xFC0 = 1111 1100 0000 + // If there is no bad chip on the side, this check is done. + if ((side==0 and (v_badChips & 0x3F)==0) or (side==1 and (v_badChips & 0xFC0)==0)) return true; + + int chip{getChip(stripId, ctx)}; + if (chip<0 or chip>=12) { + ATH_MSG_WARNING("chip number is invalid: " << chip); + return false; + } + + // Check if the chip is bad + const bool badChip{static_cast<bool>(v_badChips & (1<<chip))}; + + return (not badChip); +} + +// Check if a strip is within a bad module +bool FaserSCT_ConfigurationConditionsTool::isStripInBadModule(const Identifier& stripId, const FaserSCT_ConfigurationCondData* condData) const { + if (condData==nullptr) { + ATH_MSG_ERROR("In isStripInBadModule, FaserSCT_ConfigurationCondData pointer cannot be retrieved"); + return true; + } + + const Identifier moduleId(m_idHelper->module_id(m_idHelper->wafer_id(stripId))); + return condData->isBadModuleId(moduleId); +} + +// Check if a wafer is within a bad module +bool FaserSCT_ConfigurationConditionsTool::isWaferInBadModule(const Identifier& waferId, const EventContext& ctx) const { + const FaserSCT_ConfigurationCondData* condData{getCondData(ctx)}; + if (condData==nullptr) { + ATH_MSG_ERROR("In isWaferInBadModule, FaserSCT_ConfigurationCondData pointer cannot be retrieved"); + return true; + } + + const Identifier moduleId{m_idHelper->module_id(waferId)}; + return condData->isBadModuleId(moduleId); +} + +// Find the chip number containing a particular strip Identifier +int FaserSCT_ConfigurationConditionsTool::getChip(const Identifier& stripId, const EventContext& ctx) const { + // Find side and strip number + const int side{m_idHelper->side(stripId)}; + int strip{m_idHelper->strip(stripId)}; + + // Check for swapped readout direction + const IdentifierHash waferHash{m_idHelper->wafer_hash(m_idHelper->wafer_id(stripId))}; + const TrackerDD::SiDetectorElement* pElement{getDetectorElement(waferHash, ctx)}; + if (pElement==nullptr) { + ATH_MSG_FATAL("Element pointer is nullptr in 'badStrips' method"); + return invalidChipNumber; + } + strip = (pElement->swapPhiReadoutDirection()) ? lastStrip - strip: strip; + + // Find chip number + return (side==0 ? 
+}
+
+int FaserSCT_ConfigurationConditionsTool::getChip(const Identifier& stripId) const {
+  const EventContext& ctx{Gaudi::Hive::currentContext()};
+  return getChip(stripId, ctx);
+}
+
+const std::set<Identifier>* FaserSCT_ConfigurationConditionsTool::badModules(const EventContext& ctx) const {
+  const FaserSCT_ConfigurationCondData* condData{getCondData(ctx)};
+  if (condData==nullptr) {
+    ATH_MSG_ERROR("In badModules, FaserSCT_ConfigurationCondData pointer cannot be retrieved");
+    return nullptr;
+  }
+
+  return condData->getBadModuleIds();
+}
+
+const std::set<Identifier>* FaserSCT_ConfigurationConditionsTool::badModules() const {
+  const EventContext& ctx{Gaudi::Hive::currentContext()};
+  return badModules(ctx);
+}
+
+void FaserSCT_ConfigurationConditionsTool::badStrips(const Identifier& moduleId, std::set<Identifier>& strips, const EventContext& ctx, bool ignoreBadModules, bool ignoreBadChips) const {
+  const FaserSCT_ConfigurationCondData* condData{getCondData(ctx)};
+  if (condData==nullptr) {
+    ATH_MSG_ERROR("In badStrips, FaserSCT_ConfigurationCondData pointer cannot be retrieved");
+    return;
+  }
+
+  // Bad strips for a given module
+  if (ignoreBadModules) {
+    // Ignore strips in bad modules
+    if (condData->isBadModuleId(moduleId)) return;
+  }
+
+  for (const Identifier& badStripId: *(condData->getBadStripIds())) {
+    if (ignoreBadChips) {
+      // Ignore strips in bad chips
+      const int chip{getChip(badStripId)};
+      if (chip!=invalidChipNumber) {
+        unsigned int chipStatusWord{condData->getBadChips(moduleId)};
+        if ((chipStatusWord & (1 << chip)) != 0) continue;
+      }
+    }
+    if (m_idHelper->module_id(m_idHelper->wafer_id(badStripId)) == moduleId) strips.insert(badStripId);
+  }
+}
+
+void FaserSCT_ConfigurationConditionsTool::badStrips(const Identifier& moduleId, std::set<Identifier>& strips, bool ignoreBadModules, bool ignoreBadChips) const {
+  const EventContext& ctx{Gaudi::Hive::currentContext()};
+  badStrips(moduleId, strips, ctx, ignoreBadModules, ignoreBadChips);
+}
+
+std::pair<bool, bool> FaserSCT_ConfigurationConditionsTool::badLinks(const IdentifierHash& hash, const EventContext& ctx) const {
+  // Bad links for a given module
+  // N.B. a confusing convention is used here: true means the link is good, false means it is bad...
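+  // e.g. {true, true} means both links of the module are good; note that
+  // {false, false} is also returned when the conditions data cannot be retrieved.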
+ const FaserSCT_ConfigurationCondData* condData{getCondData(ctx)}; + if (condData==nullptr) { + ATH_MSG_ERROR("In badLinks, FaserSCT_ConfigurationCondData pointer cannot be retrieved"); + return std::pair<bool, bool>{false, false}; + } + + return condData->areBadLinks(hash); +} + +std::pair<bool, bool> FaserSCT_ConfigurationConditionsTool::badLinks(const IdentifierHash& hash) const { + const EventContext& ctx{Gaudi::Hive::currentContext()}; + return badLinks(hash, ctx); +} + +const std::map<IdentifierHash, std::pair<bool, bool>>* FaserSCT_ConfigurationConditionsTool::badLinks(const EventContext& ctx) const { + const FaserSCT_ConfigurationCondData* condData{getCondData(ctx)}; + if (condData==nullptr) { + ATH_MSG_ERROR("In badLinks, FaserSCT_ConfigurationCondData pointer cannot be retrieved"); + return nullptr; + } + + return condData->getBadLinks(); +} + +const std::map<IdentifierHash, std::pair<bool, bool>>* FaserSCT_ConfigurationConditionsTool::badLinks() const { + const EventContext& ctx{Gaudi::Hive::currentContext()}; + return badLinks(ctx); +} + +const std::map<Identifier, unsigned int>* FaserSCT_ConfigurationConditionsTool::badChips(const EventContext& ctx) const { + const FaserSCT_ConfigurationCondData* condData{getCondData(ctx)}; + if (condData==nullptr) { + ATH_MSG_ERROR("In badChips, FaserSCT_ConfigurationCondData pointer cannot be retrieved"); + return nullptr; + } + + return condData->getBadChips(); +} + +const std::map<Identifier, unsigned int>* FaserSCT_ConfigurationConditionsTool::badChips() const { + const EventContext& ctx{Gaudi::Hive::currentContext()}; + return badChips(ctx); +} + +unsigned int FaserSCT_ConfigurationConditionsTool::badChips(const Identifier& moduleId, const EventContext& ctx) const { + // Bad chips for a given module + const FaserSCT_ConfigurationCondData* condData{getCondData(ctx)}; + if (condData==nullptr) { + ATH_MSG_ERROR("In badChips, FaserSCT_ConfigurationCondData pointer cannot be retrieved"); + return 0xFFF; // 12 bad chips + } + + return condData->getBadChips(moduleId); +} + +unsigned int FaserSCT_ConfigurationConditionsTool::badChips(const Identifier& moduleId) const { + const EventContext& ctx{Gaudi::Hive::currentContext()}; + return badChips(moduleId, ctx); +} +void +FaserSCT_ConfigurationConditionsTool::badStrips(std::set<Identifier>& strips, const EventContext& ctx, bool ignoreBadModules, bool ignoreBadChips) const { + const FaserSCT_ConfigurationCondData* condData{getCondData(ctx)}; + if (condData==nullptr) { + ATH_MSG_ERROR("In badStrips, FaserSCT_ConfigurationCondData pointer cannot be retrieved"); + return; + } + + if (!ignoreBadModules and !ignoreBadChips) { + std::copy(condData->getBadStripIds()->begin(), condData->getBadStripIds()->end(), std::inserter(strips,strips.begin())); + return; + } + for (const Identifier& badStripId: *(condData->getBadStripIds())) { + const Identifier moduleId{m_idHelper->module_id(m_idHelper->wafer_id(badStripId))}; + // Ignore strips in bad modules + if (ignoreBadModules) { + if (condData->isBadModuleId(moduleId)) continue; + } + // Ignore strips in bad chips + if (ignoreBadChips) { + const int chip{getChip(badStripId)}; + if (chip!=invalidChipNumber) { + unsigned int chipStatusWord{condData->getBadChips(moduleId)}; + if ((chipStatusWord & (1 << chip)) != 0) continue; + } + } + strips.insert(badStripId); + } +} + +void +FaserSCT_ConfigurationConditionsTool::badStrips(std::set<Identifier>& strips, bool ignoreBadModules, bool ignoreBadChips) const { + const EventContext& ctx{Gaudi::Hive::currentContext()}; 
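+  // The context-free overload simply forwards to the context-aware version.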
+ badStrips(strips, ctx, ignoreBadModules, ignoreBadChips); +} + +const FaserSCT_ConfigurationCondData* +FaserSCT_ConfigurationConditionsTool::getCondData(const EventContext& ctx) const { + SG::ReadCondHandle<FaserSCT_ConfigurationCondData> condData{m_condKey, ctx}; + if (!condData.isValid()) { + ATH_MSG_ERROR("can't retrieve condData"); + } + return condData.retrieve(); +} + +const TrackerDD::SiDetectorElement* FaserSCT_ConfigurationConditionsTool::getDetectorElement(const IdentifierHash& waferHash, const EventContext& ctx) const { + SG::ReadCondHandle<TrackerDD::SiDetectorElementCollection> condData{m_SCTDetEleCollKey, ctx}; + if (not condData.isValid()) return nullptr; + return condData->getDetectorElement(waferHash); +} diff --git a/Tracker/TrackerConditions/FaserSCT_ConditionsTools/src/FaserSCT_ConfigurationConditionsTool.h b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/src/FaserSCT_ConfigurationConditionsTool.h new file mode 100644 index 0000000000000000000000000000000000000000..eef808511f576b6c71d7dbb4d0ac51982dc51a85 --- /dev/null +++ b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/src/FaserSCT_ConfigurationConditionsTool.h @@ -0,0 +1,109 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +/** + * @file FaserSCT_ConfigurationConditionsTool.h + * header file for tool which reads SCT configuration from database + * @author shaun.roe@cern.ch, gwilliam@mail.cern.ch +**/ + + +#ifndef FASERSCT_CONFIGURATIONCONDITIONSTOOL_H +#define FASERSCT_CONFIGURATIONCONDITIONSTOOL_H + +// Athena includes +#include "AthenaBaseComps/AthAlgTool.h" +#include "FaserSCT_ConditionsTools/ISCT_ConfigurationConditionsTool.h" + +#include "TrackerReadoutGeometry/SiDetectorElement.h" +#include "FaserSCT_ConditionsData/FaserSCT_ConfigurationCondData.h" +#include "StoreGate/ReadCondHandleKey.h" + +// Gaudi includes +#include "GaudiKernel/ToolHandle.h" +#include "GaudiKernel/EventContext.h" +#include "GaudiKernel/ContextSpecificPtr.h" +#include "TrackerReadoutGeometry/SiDetectorElementCollection.h" + +// STL includes +#include <string> + +// Forward declarations +class FaserSCT_ID; + +/** + * @class FaserSCT_ConfigurationConditionsTool + * Tool which reads FaserSCT_Configuration from the database + * Deals with bad modules, bad links, strips out of the readout and bad strips +**/ + +class FaserSCT_ConfigurationConditionsTool: public extends<AthAlgTool, ISCT_ConfigurationConditionsTool> { + public: + + //@name Tool methods + //@{ + FaserSCT_ConfigurationConditionsTool(const std::string& type, const std::string& name, const IInterface* parent); + virtual ~FaserSCT_ConfigurationConditionsTool() = default; + virtual StatusCode initialize() override; + //@} + + /**Can the tool report about the given component? 
(chip, module...)*/
+  virtual bool canReportAbout(InDetConditions::Hierarchy h) const override;
+
+  /**Is the detector element good?*/
+  virtual bool isGood(const Identifier& elementId, InDetConditions::Hierarchy h=InDetConditions::DEFAULT) const override;
+  virtual bool isGood(const Identifier& elementId, const EventContext& ctx, InDetConditions::Hierarchy h=InDetConditions::DEFAULT) const override;
+
+  /**Is the wafer with this hash good?*/
+  virtual bool isGood(const IdentifierHash& hashId) const override;
+  virtual bool isGood(const IdentifierHash& hashId, const EventContext& ctx) const override;
+
+  /**List of bad modules*/
+  virtual const std::set<Identifier>* badModules(const EventContext& ctx) const override;
+  virtual const std::set<Identifier>* badModules() const override;
+  /**List of bad strips*/
+  virtual void badStrips(std::set<Identifier>& strips, const EventContext& ctx, bool ignoreBadModules=false, bool ignoreBadChips=false) const override;
+  virtual void badStrips(std::set<Identifier>& strips, bool ignoreBadModules=false, bool ignoreBadChips=false) const override;
+  /**List of bad strips for a given module*/
+  virtual void badStrips(const Identifier& moduleId, std::set<Identifier>& strips, const EventContext& ctx, bool ignoreBadModules=false, bool ignoreBadChips=false) const override;
+  virtual void badStrips(const Identifier& moduleId, std::set<Identifier>& strips, bool ignoreBadModules=false, bool ignoreBadChips=false) const override;
+  /**Bad links for a given module (by wafer hash)*/
+  virtual std::pair<bool, bool> badLinks(const IdentifierHash& hash, const EventContext& ctx) const override;
+  virtual std::pair<bool, bool> badLinks(const IdentifierHash& hash) const override;
+  /**Map of all bad links*/
+  virtual const std::map<IdentifierHash, std::pair<bool, bool>>* badLinks(const EventContext& ctx) const override;
+  virtual const std::map<IdentifierHash, std::pair<bool, bool>>* badLinks() const override;
+  /**List of bad chips*/
+  virtual const std::map<Identifier, unsigned int>* badChips(const EventContext& ctx) const override;
+  virtual const std::map<Identifier, unsigned int>* badChips() const override;
+  /**Bad chips for a given module*/
+  virtual unsigned int badChips(const Identifier& moduleId, const EventContext& ctx) const override;
+  virtual unsigned int badChips(const Identifier& moduleId) const override;
+  /** Get the chip number containing a particular strip*/
+  virtual int getChip(const Identifier& stripId, const EventContext& ctx) const override;
+  virtual int getChip(const Identifier& stripId) const override;
+
+ private:
+  SG::ReadCondHandleKey<FaserSCT_ConfigurationCondData> m_condKey{this, "CondKeyConfig", "FaserSCT_ConfigurationCondData", "SCT DAQ configuration"};
+  SG::ReadCondHandleKey<TrackerDD::SiDetectorElementCollection> m_SCTDetEleCollKey{this, "SCTDetEleCollKey", "SCT_DetectorElementCollection", "Key of SiDetectorElementCollection for SCT"};
+
+  const FaserSCT_ID* m_idHelper{nullptr};
+  BooleanProperty m_checkStripsInsideModules{this, "checkStripsInsideModule", true, "Check if a strip is bad because it is inside a bad module"};
+
+  /** Is a strip within a bad module*/
+  bool isStripInBadModule(const Identifier& stripId, const FaserSCT_ConfigurationCondData*) const;
+  /** Is a wafer in a bad module*/
+  bool isWaferInBadModule(const Identifier& waferId, const EventContext& ctx) const;
+
+  /**Is a chip with this Identifier good?*/
+  bool isGoodChip(const Identifier& stripId, const EventContext& ctx) const;
+
+  /** enum for constants*/
+  enum
{stripsPerChip=128, lastStrip=767, invalidChipNumber=-1}; + + const FaserSCT_ConfigurationCondData* getCondData(const EventContext& ctx) const; + const TrackerDD::SiDetectorElement* getDetectorElement(const IdentifierHash& waferHash, const EventContext& ctx) const; +}; + +#endif //FASERSCT_CONFIGURATIONCONDITIONSTOOL_H diff --git a/Tracker/TrackerConditions/FaserSCT_ConditionsTools/src/FaserSCT_NoisyStripTool.cxx b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/src/FaserSCT_NoisyStripTool.cxx deleted file mode 100644 index 0e2b5c444a8839e0421a073fd79b0737a89ed635..0000000000000000000000000000000000000000 --- a/Tracker/TrackerConditions/FaserSCT_ConditionsTools/src/FaserSCT_NoisyStripTool.cxx +++ /dev/null @@ -1,113 +0,0 @@ -/* - Copyright (C) 2002-2019 CERN for the benefit of the ATLAS and FASER collaborations -*/ - -#include "FaserSCT_NoisyStripTool.h" - - -FaserSCT_NoisyStripTool::FaserSCT_NoisyStripTool (const std::string& type, - const std::string& name, const IInterface* parent) : - base_class(type, name, parent) {} - - -StatusCode FaserSCT_NoisyStripTool::initialize() { - return StatusCode::SUCCESS; -} - - -StatusCode FaserSCT_NoisyStripTool::finalize() { - return StatusCode::SUCCESS; -} - - -std::map<std::pair<int, int>, double> -FaserSCT_NoisyStripTool::getNoisyStrips(const EventContext& /*ctx*/) const { - // TODO fix hard-coded definition of noisy strip and read from NoisyPixels.xml database - std::map<std::pair<int, int>, double> noisyStrips {}; - // noisyStrips.insert({std::make_pair(10, 150), 0.25279282895176935}); - // noisyStrips.insert({std::make_pair(20, 155), 0.9950721341449819}); - // noisyStrips.insert({std::make_pair(38, 41), 1.0}); - // noisyStrips.insert({std::make_pair(48, 643), 0.6209110977322898}); - // noisyStrips.insert({std::make_pair(49, 69), 1.0}); - // noisyStrips.insert({std::make_pair(49, 508), 0.1250027872544429}); - // noisyStrips.insert({std::make_pair(49, 660), 1.0}); - // noisyStrips.insert({std::make_pair(61, 350), 0.3669587709322809}); - // noisyStrips.insert({std::make_pair(61, 351), 0.32956496532655477}); - // noisyStrips.insert({std::make_pair(64, 287), 1.0}); - // noisyStrips.insert({std::make_pair(65, 323), 0.5987691484380226}); - // noisyStrips.insert({std::make_pair(67, 147), 1.0}); - // noisyStrips.insert({std::make_pair(67, 207), 1.0}); - // noisyStrips.insert({std::make_pair(67, 346), 0.6463977523580173}); - // noisyStrips.insert({std::make_pair(83, 114), 0.9968113809173412}); - // noisyStrips.insert({std::make_pair(86, 544), 0.46609583695676415}); - // noisyStrips.insert({std::make_pair(96, 79), 1.0}); - // noisyStrips.insert({std::make_pair(96, 183), 1.0}); - // noisyStrips.insert({std::make_pair(97, 550), 1.0}); - // noisyStrips.insert({std::make_pair(100, 215), 1.0}); - // noisyStrips.insert({std::make_pair(100, 610), 1.0}); - // noisyStrips.insert({std::make_pair(130, 722), 1.0}); - // noisyStrips.insert({std::make_pair(132, 297), 0.8765803732691151}); - // noisyStrips.insert({std::make_pair(144, 597), 1.0}); - // noisyStrips.insert({std::make_pair(144, 665), 1.0}); - // noisyStrips.insert({std::make_pair(145, 9), 1.0}); - // noisyStrips.insert({std::make_pair(145, 492), 1.0}); - // noisyStrips.insert({std::make_pair(145, 566), 1.0}); - // noisyStrips.insert({std::make_pair(146, 267), 1.0}); - // noisyStrips.insert({std::make_pair(147, 393), 0.4758623765246282}); - // noisyStrips.insert({std::make_pair(147, 394), 0.5172252324570206}); - // noisyStrips.insert({std::make_pair(147, 395), 0.5058532343300556}); - // 
noisyStrips.insert({std::make_pair(147, 396), 0.5272816464869445}); - // noisyStrips.insert({std::make_pair(147, 397), 0.4782036702566504}); - // noisyStrips.insert({std::make_pair(147, 398), 0.5092202376970589}); - // noisyStrips.insert({std::make_pair(147, 399), 0.4811247129127924}); - // noisyStrips.insert({std::make_pair(147, 600), 1.0}); - // noisyStrips.insert({std::make_pair(151, 417), 0.6434767097018753}); - // noisyStrips.insert({std::make_pair(152, 698), 0.2651013445715433}); - // noisyStrips.insert({std::make_pair(152, 699), 0.3080250629919504}); - // noisyStrips.insert({std::make_pair(153, 396), 0.4862532610876982}); - // noisyStrips.insert({std::make_pair(153, 397), 0.44509108747519344}); - // noisyStrips.insert({std::make_pair(154, 620), 0.4075634936562089}); - // noisyStrips.insert({std::make_pair(154, 621), 0.5001449372310299}); - // noisyStrips.insert({std::make_pair(155, 146), 0.4566637679220461}); - // noisyStrips.insert({std::make_pair(155, 147), 0.4941021695988584}); - // noisyStrips.insert({std::make_pair(158, 737), 0.5486208665016612}); - // noisyStrips.insert({std::make_pair(158, 738), 0.5591901353490758}); - // noisyStrips.insert({std::make_pair(158, 739), 0.5590786451713604}); - // noisyStrips.insert({std::make_pair(160, 7), 1.0}); - // noisyStrips.insert({std::make_pair(161, 763), 1.0}); - // noisyStrips.insert({std::make_pair(164, 613), 1.0}); - // noisyStrips.insert({std::make_pair(167, 175), 1.0}); - // noisyStrips.insert({std::make_pair(170, 90), 0.48484848484848486}); - // noisyStrips.insert({std::make_pair(170, 91), 0.4570874305973644}); - // noisyStrips.insert({std::make_pair(173, 18), 1.0}); - // noisyStrips.insert({std::make_pair(173, 484), 1.0}); - // noisyStrips.insert({std::make_pair(174, 230), 1.0}); - // noisyStrips.insert({std::make_pair(174, 530), 1.0}); - // noisyStrips.insert({std::make_pair(175, 683), 1.0}); - // noisyStrips.insert({std::make_pair(176, 418), 1.0}); - // noisyStrips.insert({std::make_pair(177, 149), 1.0}); - // noisyStrips.insert({std::make_pair(177, 345), 1.0}); - // noisyStrips.insert({std::make_pair(178, 214), 1.0}); - // noisyStrips.insert({std::make_pair(178, 508), 0.5192097576203537}); - // noisyStrips.insert({std::make_pair(178, 673), 0.6028496889424042}); - // noisyStrips.insert({std::make_pair(179, 651), 0.999977701964457}); - // noisyStrips.insert({std::make_pair(182, 525), 0.5044707561263853}); - // noisyStrips.insert({std::make_pair(182, 526), 0.5083506143108792}); - // noisyStrips.insert({std::make_pair(185, 493), 0.42738644725399694}); - // noisyStrips.insert({std::make_pair(185, 494), 0.43757664949717934}); - // noisyStrips.insert({std::make_pair(187, 427), 0.9203737150757019}); - // noisyStrips.insert({std::make_pair(188, 696), 0.6201752625593685}); - // noisyStrips.insert({std::make_pair(188, 752), 1.0}); - // noisyStrips.insert({std::make_pair(189, 249), 0.1250027872544429}); - // noisyStrips.insert({std::make_pair(189, 338), 0.25925925925925924}); - // noisyStrips.insert({std::make_pair(191, 170), 1.0}); - // noisyStrips.insert({std::make_pair(191, 406), 1.0}); - return noisyStrips; -} - - -std::map<std::pair<int, int>, double> -FaserSCT_NoisyStripTool::getNoisyStrips() const { - const EventContext& ctx{Gaudi::Hive::currentContext()}; - return getNoisyStrips(ctx); -} diff --git a/Tracker/TrackerConditions/FaserSCT_ConditionsTools/src/FaserSCT_NoisyStripTool.h b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/src/FaserSCT_NoisyStripTool.h deleted file mode 100644 index 
c554620885b0cf28beb5c3d93c6d6fa31377e3a4..0000000000000000000000000000000000000000 --- a/Tracker/TrackerConditions/FaserSCT_ConditionsTools/src/FaserSCT_NoisyStripTool.h +++ /dev/null @@ -1,25 +0,0 @@ -/* - Copyright (C) 2002-2022 CERN for the benefit of the ATLAS and CERN collaborations -*/ - -#ifndef FASERSCT_NOISY_STRIP_TOOL -#define FASERSCT_NOISY_STRIP_TOOL - -#include "AthenaBaseComps/AthAlgTool.h" -#include "FaserSCT_ConditionsTools/ISCT_NoisyStripTool.h" -#include "GaudiKernel/ICondSvc.h" -#include "GaudiKernel/EventContext.h" - - -class FaserSCT_NoisyStripTool: public extends<AthAlgTool, ISCT_NoisyStripTool> { -public: - FaserSCT_NoisyStripTool(const std::string& type, const std::string& name, const IInterface* parent); - virtual ~FaserSCT_NoisyStripTool() = default; - virtual StatusCode initialize() override; - virtual StatusCode finalize() override; - - virtual std::map<std::pair<int, int>, double> getNoisyStrips(const EventContext& ctx) const override; - virtual std::map<std::pair<int, int>, double> getNoisyStrips() const override; -}; - -#endif // FASERSCT_NOISY_STRIP_TOOL diff --git a/Tracker/TrackerConditions/FaserSCT_ConditionsTools/src/components/FaserSCT_ConditionsTools_entries.cxx b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/src/components/FaserSCT_ConditionsTools_entries.cxx index adad7ee37337a3d347bedc9c1f5b28cf731fb837..ae50a450b23170de19a520750fac0c9893f29ec6 100644 --- a/Tracker/TrackerConditions/FaserSCT_ConditionsTools/src/components/FaserSCT_ConditionsTools_entries.cxx +++ b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/src/components/FaserSCT_ConditionsTools_entries.cxx @@ -20,7 +20,8 @@ // #include "../SCT_StripVetoTool.h" // #include "../SCT_TdaqEnabledTool.h" #include "../FaserSCT_CableMappingTool.h" -#include "../FaserSCT_NoisyStripTool.h" +#include "../FaserSCT_ConditionsSummaryTool.h" +#include "../FaserSCT_ConfigurationConditionsTool.h" // DECLARE_COMPONENT( SCT_ByteStreamErrorsTool ) // DECLARE_COMPONENT( SCT_ChargeTrappingTool ) @@ -44,4 +45,5 @@ DECLARE_COMPONENT( FaserSCT_SiliconConditionsTool ) // DECLARE_COMPONENT( SCT_StripVetoTool ) // DECLARE_COMPONENT( SCT_TdaqEnabledTool ) DECLARE_COMPONENT( FaserSCT_CableMappingTool ) -DECLARE_COMPONENT( FaserSCT_NoisyStripTool ) +DECLARE_COMPONENT( FaserSCT_ConditionsSummaryTool ) +DECLARE_COMPONENT( FaserSCT_ConfigurationConditionsTool ) diff --git a/Tracker/TrackerDetDescr/FaserSCT_GeoModel/python/FaserSCT_GeoModelConfig.py b/Tracker/TrackerDetDescr/FaserSCT_GeoModel/python/FaserSCT_GeoModelConfig.py index 26dff64f141ad9f22ff4ac430ef2fbdf98065f7e..038c8c38bb3c6d0daed962726f310867873a4c02 100644 --- a/Tracker/TrackerDetDescr/FaserSCT_GeoModel/python/FaserSCT_GeoModelConfig.py +++ b/Tracker/TrackerDetDescr/FaserSCT_GeoModel/python/FaserSCT_GeoModelConfig.py @@ -2,7 +2,7 @@ from AthenaConfiguration.ComponentFactory import CompFactory from AthenaConfiguration.Enums import ProductionStep -from IOVDbSvc.IOVDbSvcConfig import addFoldersSplitOnline +from IOVDbSvc.IOVDbSvcConfig import addFolders, addFoldersSplitOnline def FaserSCT_GeometryCfg( flags ): from FaserGeoModel.GeoModelConfig import GeoModelCfg @@ -33,10 +33,11 @@ def FaserSCT_GeometryCfg( flags ): # acc.merge(addFoldersSplitOnline(flags,"INDET","/Indet/Onl/AlignL3","/Indet/AlignL3",className="AlignableTransformContainer")) print("FaserSCT dynamic align flag is not supported!") else: + print("Override Alignment dbname to OFLP200, fix this when alignment available in CONDBR3") if flags.Common.Project != "AthSimulation" and 
(flags.Common.ProductionStep != ProductionStep.Simulation or flags.Overlay.DataOverlay): - acc.merge(addFoldersSplitOnline(flags,"SCT","/Tracker/Onl/Align","/Tracker/Align",className="AlignableTransformContainer")) + acc.merge(addFolders(flags,"/Tracker/Align", "SCT_OFL", className="AlignableTransformContainer", db="OFLP200")) else: - acc.merge(addFoldersSplitOnline(flags,"SCT","/Tracker/Onl/Align","/Tracker/Align")) + acc.merge(addFolders(flags, "/Tracker/Align", "SCT_OFL", db="OFLP200")) if flags.Common.Project != "AthSimulation": # Protection for AthSimulation builds if flags.Common.ProductionStep != ProductionStep.Simulation or flags.Overlay.DataOverlay: FaserSCT_AlignCondAlg = CompFactory.FaserSCT_AlignCondAlg diff --git a/Tracker/TrackerEventCnv/TrackerByteStream/src/TrackerByteStreamCnv.cxx b/Tracker/TrackerEventCnv/TrackerByteStream/src/TrackerByteStreamCnv.cxx index 87bb2437c36c0d0c4f2cbe0cbfb4f3a1270c1e1a..4d7b497ce61ad48c1ab20e8b32c87a6eaa6d7851 100644 --- a/Tracker/TrackerEventCnv/TrackerByteStream/src/TrackerByteStreamCnv.cxx +++ b/Tracker/TrackerEventCnv/TrackerByteStream/src/TrackerByteStreamCnv.cxx @@ -28,7 +28,6 @@ TrackerByteStreamCnv::TrackerByteStreamCnv(ISvcLocator* svcloc) , m_tool("TrackerDataDecoderTool") , m_mappingTool("FaserSCT_CableMappingTool") , m_rdpSvc("FaserROBDataProviderSvc", "TrackerByteStreamCnv") - , m_noisyStripTool("FaserSCT_NoisyStripTool") , m_detStoreSvc("StoreGateSvc/DetectorStore", "TrackerByteStreamCnv") { } @@ -49,7 +48,6 @@ StatusCode TrackerByteStreamCnv::initialize() CHECK(m_rdpSvc.retrieve()); CHECK(m_tool.retrieve()); CHECK(m_mappingTool.retrieve()); - CHECK(m_noisyStripTool.retrieve()); ATH_CHECK(m_detStoreSvc.retrieve()); ATH_CHECK(m_detStoreSvc->retrieve(m_sctID, "FaserSCT_ID")); @@ -101,12 +99,9 @@ StatusCode TrackerByteStreamCnv::createObj(IOpaqueAddress* pAddr, DataObject*& p auto mapping = m_mappingTool->getCableMapping(); ATH_MSG_DEBUG("Cable mapping contains " << mapping.size() << " entries"); - auto noisyStrips = m_noisyStripTool->getNoisyStrips(); - ATH_MSG_DEBUG(noisyStrips.size() << " noisy strips"); - // Convert raw data into this container - CHECK( m_tool->convert(re, cont, key, mapping, noisyStrips) ); + CHECK( m_tool->convert(re, cont, key, mapping) ); pObj = SG::asStorable(cont); diff --git a/Tracker/TrackerEventCnv/TrackerByteStream/src/TrackerByteStreamCnv.h b/Tracker/TrackerEventCnv/TrackerByteStream/src/TrackerByteStreamCnv.h index 86262fbc244530aecac352d968a13d54d0943728..161bcd9740c6cb0d3c4f04e1bc03dfce7e613493 100644 --- a/Tracker/TrackerEventCnv/TrackerByteStream/src/TrackerByteStreamCnv.h +++ b/Tracker/TrackerEventCnv/TrackerByteStream/src/TrackerByteStreamCnv.h @@ -15,7 +15,6 @@ #include "AthenaBaseComps/AthMessaging.h" #include "FaserByteStreamCnvSvcBase/FaserByteStreamAddress.h" #include "FaserSCT_ConditionsTools/ISCT_CableMappingTool.h" -#include "FaserSCT_ConditionsTools/ISCT_NoisyStripTool.h" class TrackerDataDecoderTool; class IFaserROBDataProviderSvc; @@ -45,7 +44,6 @@ private: ServiceHandle<StoreGateSvc> m_detStoreSvc; ToolHandle<TrackerDataDecoderTool> m_tool; ToolHandle<ISCT_CableMappingTool> m_mappingTool; - ToolHandle<ISCT_NoisyStripTool> m_noisyStripTool; const FaserSCT_ID* m_sctID{nullptr}; }; diff --git a/Tracker/TrackerEventCnv/TrackerByteStream/src/TrackerDataDecoderTool.cxx b/Tracker/TrackerEventCnv/TrackerByteStream/src/TrackerDataDecoderTool.cxx index 612978a068a931bf4a85b267537d89b626ba53e5..f8ec8ce1de921bb86ae9ef704a0dce1084a33947 100644 --- 
a/Tracker/TrackerEventCnv/TrackerByteStream/src/TrackerDataDecoderTool.cxx +++ b/Tracker/TrackerEventCnv/TrackerByteStream/src/TrackerDataDecoderTool.cxx @@ -81,8 +81,7 @@ StatusCode TrackerDataDecoderTool::convert(const DAQFormats::EventFull* re, FaserSCT_RDO_Container* container, std::string key, - const std::map<int, std::pair<int, int> >& cableMapping, - const std::map<std::pair<int, int>, double>& noisyStrips) + const std::map<int, std::pair<int, int> >& cableMapping) { ATH_MSG_DEBUG("TrackerDataDecoderTool::convert()"); @@ -233,12 +232,6 @@ TrackerDataDecoderTool::convert(const DAQFormats::EventFull* re, ATH_MSG_ERROR("Invalid strip number on side: " << stripOnSide); continue; } - // check if strip is noisy - auto it = noisyStrips.find(std::make_pair(waferHash, stripOnSide)); - if (it != noisyStrips.end() && it->second >= m_occupancyCut) { - ATH_MSG_VERBOSE("Mask wafer " << waferHash << ", strip " << stripOnSide << " with an occupancy of " << it->second); - continue; - } Identifier digitID {m_sctID->strip_id(id, stripOnSide)}; int errors{0}; int groupSize{1}; diff --git a/Tracker/TrackerEventCnv/TrackerByteStream/src/TrackerDataDecoderTool.h b/Tracker/TrackerEventCnv/TrackerByteStream/src/TrackerDataDecoderTool.h index 3bc337276a3f8eaba28fa51d90f7eb4f903592c7..fbe14e215a19ecf9f009197270161c5f68ae9e87 100644 --- a/Tracker/TrackerEventCnv/TrackerByteStream/src/TrackerDataDecoderTool.h +++ b/Tracker/TrackerEventCnv/TrackerByteStream/src/TrackerDataDecoderTool.h @@ -32,8 +32,7 @@ class TrackerDataDecoderTool : public AthAlgTool { virtual StatusCode finalize(); StatusCode convert(const DAQFormats::EventFull* re, FaserSCT_RDO_Container* cont, std::string key, - const std::map<int, std::pair<int, int> >& cableMapping, - const std::map<std::pair<int, int>, double>& noisyStrips); + const std::map<int, std::pair<int, int> >& cableMapping); private: const FaserSCT_ID* m_sctID{nullptr}; @@ -43,7 +42,6 @@ private: "ModuleMap", {7, 2, 5, 0, 3, 6, 1, 4}, "Mapping from online to offline module numbers" }; - Gaudi::Property<double> m_occupancyCut {this, "OccupancyCut", 0.1, "Mask strips with an occupancy larger than the OccupancyCut"}; // Gaudi::Property<uint32_t> m_trb0Station { this, "Trb0StationNumber", 1, "Station number for TRB #0" }; }; diff --git a/Tracker/TrackerRecAlgs/NoisyStripFinder/CMakeLists.txt b/Tracker/TrackerRecAlgs/NoisyStripFinder/CMakeLists.txt index 29e80551e078ae78985801f204169932da2dcb94..5a85e55131b3ef638671c33f5ec3f742bf48203d 100644 --- a/Tracker/TrackerRecAlgs/NoisyStripFinder/CMakeLists.txt +++ b/Tracker/TrackerRecAlgs/NoisyStripFinder/CMakeLists.txt @@ -17,5 +17,5 @@ atlas_add_component( NoisyStripFinder atlas_install_python_modules( python/*.py ) -#atlas_install_scripts( test/*.py ) +atlas_install_scripts( share/*.py share/*.sh ) diff --git a/Tracker/TrackerRecAlgs/NoisyStripFinder/README.md b/Tracker/TrackerRecAlgs/NoisyStripFinder/README.md new file mode 100644 index 0000000000000000000000000000000000000000..407b1d255379dd7d6ecf2b3c9ae5f9e016a1192b --- /dev/null +++ b/Tracker/TrackerRecAlgs/NoisyStripFinder/README.md @@ -0,0 +1,72 @@ +Noisy strips are now part of the conditions database and are used in default reco + +To update the database, the following steps should be used. + +1) Determine the runs to update. 
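+Runs can be specified as single run numbers, as ranges (e.g. 7730-8370), as
+comma-separated lists of either, or as a text file with one run number per line.
+The scripts below all accept the same syntax.
+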
+A handy utility to find all runs of a given type is findFaserRunsByType.py:
+
+`findFaserRunsByType.py -t Physics -o physics_runs.txt 7730-8370`
+
+2) Submit jobs to create the noisy strip DB for each run.
+From a release directory (it is best to create a subdirectory for this), run:
+
+`submitNoisyStripJobs.py --release .. physics_runs.txt`
+
+Run numbers (or a range) can also be given directly instead of a text file of runs.
+
+The submission script runs the following, which can also be run interactively:
+`NoisyStripFinderJob.py`
+`makeNoisyStripDB.py`
+
+3) Check that all of the jobs finished successfully (individual runs can also be checked):
+
+`checkNoisyStripJobs.py physics_runs.txt`
+
+With the --write_missing option, this also writes out a file of runs to submit again.
+
+4) Check that the actual noisy strips found make sense.
+This reads the individual DB files in each subdirectory; specific runs can also be given.
+
+`checkNoisyStripDB.py physics_runs.txt`
+
+5) Merge the individual runs into a single DB.
+Note that this script writes the individual runs with open-ended IOVs.
+This means the last run merged will be used for all later data until the DB is updated again.
+So if you are merging multiple times (with the --append option), it is important to do this in chronological order.
+
+`mergeNoisyStripDB.py physics_runs.txt`
+
+Note that this can be slow. Use --verbose to watch the progress.
+
+6) Test the database.
+The resulting database is named noisy_strips.db by default.
+Copy it to the data/sqlite200 subdirectory of the working directory and reco jobs will use it.
+Check here for details:
+https://gitlab.cern.ch/faser/calypso/-/blob/master/Database/ConnectionManagement/FaserAuthentication/data/dblookup.xml
+
+7) Merge with the production DB.
+The updated noisy strips folder /SCT/DAQ/NoisyStrips now needs to be merged into the production DB.
+First copy the current DB from CVMFS to some local directory:
+
+`cp /cvmfs/faser.cern.ch/repo/sw/database/DBRelease/current/sqlite200/ALLP200.db .`
+
+Next, use AtlCoolCopy to merge the updates into this file:
+
+`AtlCoolCopy "sqlite://;schema=noisy_strips.db;dbname=CONDBR3" "sqlite://;schema=ALLP200.db;dbname=CONDBR3"`
+
+This can also be slow.
+
+Finally, the ALLP200.db file should be installed on CVMFS once everything is verified to be correct.
+
+Older instructions from Tobias when he was developing this package are here:
+
+Mask noisy strips:
+
+1) Run NoisyStripFinderDbg.py on raw data files
+
+2) Run makeNoisyStripDB.py on the NoisyStripFinderHist.root produced by the NoisyStripFinderDbg.py script
+
+3) Edit the path for the COOLOFL_INDET database in Database/ConnectionManagement/FaserAuthentication/data/dblookup.xml to point to the noisy_strips.db produced by the makeNoisyStripDB.py script
+
+4) Set the checkBadChannels flag to True for the FaserSCT_ClusterizationCfg, e.g. like this:
+
+    acc.merge(FaserSCT_ClusterizationCfg(ConfigFlags, checkBadChannels=True, DataObjectName="SCT_EDGEMODE_RDOs", ClusterToolTimingPattern="01X"))
diff --git a/Tracker/TrackerRecAlgs/NoisyStripFinder/share/NoisyStripFinderHist_Analysis.py b/Tracker/TrackerRecAlgs/NoisyStripFinder/share/NoisyStripFinderHist_Analysis.py
old mode 100644
new mode 100755
index 349316b31446434c1ad41f58ba2c269e4ce0a93a..f882e8f303929f4cf34858b852698308aa2ae993
--- a/Tracker/TrackerRecAlgs/NoisyStripFinder/share/NoisyStripFinderHist_Analysis.py
+++ b/Tracker/TrackerRecAlgs/NoisyStripFinder/share/NoisyStripFinderHist_Analysis.py
@@ -71,7 +71,7 @@ for dictkey in HistDict:
     sensorhash.text = dictkey
     bini = 1
     while bini <= 768:
-        if HistDict[dictkey].GetBinContent(bini) >= 0.1 :
+        if HistDict[dictkey].GetBinContent(bini) >= 0.01:
             strip = ET.SubElement(sensorhash, "Strip")
             strip.text = str(bini - 1) # strip number is offset by histogram bin number by 1 because of underflow bin
             occupancy = ET.SubElement(strip, "Occupancy")
diff --git a/Tracker/TrackerRecAlgs/NoisyStripFinder/share/NoisyStripFinderJob.py b/Tracker/TrackerRecAlgs/NoisyStripFinder/share/NoisyStripFinderJob.py
new file mode 100755
index 0000000000000000000000000000000000000000..b6dc5c514d05c558310bf1108e505deca1e8098c
--- /dev/null
+++ b/Tracker/TrackerRecAlgs/NoisyStripFinder/share/NoisyStripFinderJob.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+"""
+  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+"""
+
+import sys
+from AthenaCommon.Logging import log, logging
+from AthenaCommon.Constants import DEBUG, VERBOSE, INFO
+from AthenaCommon.Configurable import Configurable
+from CalypsoConfiguration.AllConfigFlags import ConfigFlags
+from CalypsoConfiguration.MainServicesConfig import MainServicesCfg
+from AthenaPoolCnvSvc.PoolWriteConfig import PoolWriteCfg
+from FaserByteStreamCnvSvc.FaserByteStreamCnvSvcConfig import FaserByteStreamCnvSvcCfg
+from TrackerPrepRawDataFormation.TrackerPrepRawDataFormationConfig import FaserSCT_ClusterizationCfg
+from NoisyStripFinder.NoisyStripFinderConfig import NoisyStripFinderCfg
+import argparse
+
+parser = argparse.ArgumentParser()
+parser.add_argument("file", nargs="+", help="full path to input file")
+parser.add_argument("--nevents", "-n", default=-1, type=int, help="Number of events to process")
+parser.add_argument("--outfile", "-o", default="NoisyStripFinderHist.root", help="Specify output file")
+
+parser.add_argument("--verbose", "-v", action="store_true", help="Debug output")
+args = parser.parse_args()
+
+if args.verbose:
+    log.setLevel(DEBUG)
+else:
+    log.setLevel(INFO)
+
+Configurable.configurableRun3Behavior = True
+
+# Convert bare /eos paths to xrootd URLs
+filelist = []
+for filename in args.file:
+    if filename[:22] == "/eos/experiment/faser/":
+        filelist.append(f"root://eospublic.cern.ch/{filename}")
+    else:
+        filelist.append(filename)
+
+ConfigFlags.Input.Files = filelist
+ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-02"
+#ConfigFlags.IOVDb.DatabaseInstance = "OFLP200"
+ConfigFlags.IOVDb.DatabaseInstance = "CONDBR3"
+ConfigFlags.Input.ProjectName = "data22"
+ConfigFlags.Input.isMC = False
+ConfigFlags.GeoModel.FaserVersion = "FASERNU-03"
+ConfigFlags.Common.isOnline = False
+ConfigFlags.GeoModel.Align.Dynamic = False
+ConfigFlags.Beam.NumberOfCollisions = 0.
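+# NB: the noisy strip finder needs to see every strip, so any noisy-strip
+# masking in the byte stream conversion is switched off below (OccupancyCut=-1).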
+ConfigFlags.Detector.GeometryFaserSCT = True
+ConfigFlags.lock()
+
+acc = MainServicesCfg(ConfigFlags)
+acc.merge(PoolWriteCfg(ConfigFlags))
+acc.merge(FaserByteStreamCnvSvcCfg(ConfigFlags, OccupancyCut=-1))
+acc.merge(NoisyStripFinderCfg(ConfigFlags, OutputHistRootName=args.outfile))
+
+# Hack to avoid problem with our use of MC databases when isMC = False
+replicaSvc = acc.getService("DBReplicaSvc")
+replicaSvc.COOLSQLiteVetoPattern = ""
+replicaSvc.UseCOOLSQLite = True
+replicaSvc.UseCOOLFrontier = False
+replicaSvc.UseGeomSQLite = True
+
+# Don't print out every event
+from AthenaConfiguration.ComponentFactory import CompFactory
+eventLoop = CompFactory.AthenaEventLoopMgr()
+eventLoop.EventPrintoutInterval = 1000
+acc.addService(eventLoop)
+
+if args.verbose:
+    acc.foreach_component("*").OutputLevel = VERBOSE
+    acc.printConfig()
+else:
+    acc.foreach_component("*").OutputLevel = INFO
+
+sc = acc.run(maxEvents=args.nevents)
+print(f"Job finished with {sc.isSuccess()} => {not sc.isSuccess()}")
+sys.exit(not sc.isSuccess())
diff --git a/Tracker/TrackerRecAlgs/NoisyStripFinder/share/checkNoisyStripDB.py b/Tracker/TrackerRecAlgs/NoisyStripFinder/share/checkNoisyStripDB.py
new file mode 100755
index 0000000000000000000000000000000000000000..6ad335d516206eedff8d298d2eb180e70d4003a4
--- /dev/null
+++ b/Tracker/TrackerRecAlgs/NoisyStripFinder/share/checkNoisyStripDB.py
@@ -0,0 +1,205 @@
+#!/usr/bin/env python3
+#
+# Sept 2022, E. Torrence
+#
+# Script to check the noisy strips stored in the individual run DBs
+#
+# Usage:
+#  ./checkNoisyStripDB.py -h
+#
+import sys
+import argparse
+import subprocess
+
+from pathlib import Path
+
+from PyCool import cool
+from CoolConvUtilities.AtlCoolLib import indirectOpen
+
+def parse_arguments():
+
+    description="Script to check the noisy strips found in individual run DBs\n"
+    parser = argparse.ArgumentParser(description,
+                                     formatter_class=argparse.RawTextHelpFormatter)
+
+    parser.add_argument("runs", nargs='+', help="Specify FASER runs or range")
+    parser.add_argument("--threshold", default="0.01", help="Threshold to call a strip noisy (default: 0.01)")
+    parser.add_argument("-v", "--verbose", action="store_true", help="Debugging output")
+    return parser.parse_args()
+
+# Take a string and turn it into a list of integers
+# Can specify single values, ranges, or comma separated lists of both
+def parseRunList(runlist):
+
+    run_list = []
+
+    # Check if this is a file with run numbers
+    if len(runlist) == 1:
+        path = Path(runlist[0])
+        if path.exists() and path.is_file():
+            print(f"Reading runs from {path}")
+            # Try reading each line as a run number
+            with path.open() as f:
+                for line in f.readlines():
+                    line = line.strip()
+                    if len(line) == 0: continue
+                    if line[0] in ['#', '!']: continue
+                    if not line.isnumeric():
+                        print(f"Error parsing {line}")
+                        continue
+                    run_list.append(int(line))
+            # Done reading file
+            return(run_list)
+        elif '-' in runlist[0]:
+            pass
+        elif ',' in runlist[0]:
+            pass
+        elif not runlist[0].isnumeric():
+            print(f"File {path} doesn't exist!")
+            return run_list
+
+    for string in runlist:
+        tokens = string.split(',')
+
+        for segment in tokens:
+
+            if len(segment) == 0: continue
+
+            if '-' in segment:  # Range of runs
+                start, end = segment.split('-')
+                if not start.isnumeric():
+                    print(f"Found invalid run {start}")
+                    continue
+                if not end.isnumeric():
+                    print(f"Found invalid run {end}")
+                    continue
+                start = int(start)
+                end = int(end)
+                run_list.extend(list(range(int(start), int(end)+1)))
+
+            else:
+                if not segment.isnumeric():
+                    print(f"Found invalid run {segment}")
continue + run_list.append(int(segment)) + + return(run_list) + +class NoisyRunAnalyzer: + + def __init__(self, verbose=False, threshold=0.01): + self.verbose = verbose + + self.run_dict = {} + self.noise_threshold = threshold + + def addRun(self, runnum): + + if self.verbose: print(f"\nRun {runnum}") + + runstr = f'{runnum:06d}' + infile = Path(f'{runstr}/noisy_{runstr}.db') + if not infile.is_file(): + print(f"{runstr}/noisy_{runstr}.db doesn't exist!") + return + + db_string = f'sqlite://;schema={runstr}/noisy_{runstr}.db;dbname=CONDBR3' + try: + self.db = indirectOpen(db_string, readOnly=True, oracle=False, debug=False) + except Exception as e: + print(e) + return + + # Now read all channels + folder_string = "/SCT/DAQ/NoisyStrips" + try: + self.folder = self.db.getFolder(folder_string) + except Exception as e: + print(e) + return + + if self.folder is None: + print(f"Can't access folder {folder_string} in {db_string}") + return + + channels = cool.ChannelSelection.all() + iov_lo = (runnum<<32) + iov_hi = ((runnum+1)<<32) - 1 + tag = '' + + try: + itr = self.folder.browseObjects(iov_lo, iov_hi, channels, tag) + except Exception as e: + print(e) + return + + rd = self.run_dict.get(runnum, None) + if rd is None: + self.run_dict[runnum] = {} + + # Now iterate through objects (should only be one IOV, but multiple channels) + while itr.goToNext(): + obj = itr.currentRef() + if self.verbose: print(obj.payload()) + + sensor = obj.payload()['sensor'] + strip = obj.payload()['strip'] + occupancy = obj.payload()['occupancy'] + + if occupancy < self.noise_threshold: continue + + sensor_dict = self.run_dict[runnum].get(sensor, None) + if sensor_dict is None: + self.run_dict[runnum][sensor] = {} + + strip_dict = self.run_dict[runnum][sensor].get(strip, None) + if strip_dict is None: + self.run_dict[runnum][sensor][strip] = {} + + self.run_dict[runnum][sensor][strip] = occupancy + + if self.verbose: print(self.run_dict) + + # Done, close the database + self.db.closeDatabase() + + def printRunSummary(self): + + for run in self.run_dict: + + #print(f"Run {run}: {len(self.run_dict[run])} sensors with noisy strips") + + noisy_by_layer = [0] * 12 + noisy_strips_by_layer = [0] * 12 + + for sensor in self.run_dict[run]: + layer = sensor // 16 + noisy_by_layer[layer] += 1 + noisy_strips_by_layer[layer] += len(self.run_dict[run][sensor]) + + #print(f"Sensors by layer: ", end='') + #[ print(f' {n:3d}', end='') for n in noisy_by_layer] + #print() + + print(f"Run {run} strips > {100*self.noise_threshold:3.1f}% by layer: ", end='') + [ print(f' {n:3d}', end='') for n in noisy_strips_by_layer] + print() + + +# Command-line execution +if __name__ == "__main__": + + # Parse the command-line arguments + args = parse_arguments() + + run_list = parseRunList(args.runs) + run_list.sort() + + nra = NoisyRunAnalyzer(verbose=args.verbose, threshold=float(args.threshold)) + + for runnum in run_list: + nra.addRun(runnum) + + nra.printRunSummary() + diff --git a/Tracker/TrackerRecAlgs/NoisyStripFinder/share/checkNoisyStripHist.py b/Tracker/TrackerRecAlgs/NoisyStripFinder/share/checkNoisyStripHist.py new file mode 100755 index 0000000000000000000000000000000000000000..0ace2cb64ed224633bc5cf16ac81046384a5b3c2 --- /dev/null +++ b/Tracker/TrackerRecAlgs/NoisyStripFinder/share/checkNoisyStripHist.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python3 +# +# Simple utility to run as a post script +# after the noisyStripFinder +# +# Usage: checkNoisyStripHist.py <histfile> [return code] +# +import sys +if len(sys.argv) <= 1: + sys.exit(1) 
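+#
+# First argument is the histogram file; the optional second argument is the
+# return code of the preceding job step, which is passed through if non-zero.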
+filename = sys.argv[1]
+#
+# Check previous return code if it is provided
+if len(sys.argv) >= 3:
+    rc = int(sys.argv[2])
+    if rc: sys.exit(rc)
+#
+# Check histogram entries
+# This causes a segfault. Let's try without ROOT
+#from ROOT import TFile
+if False:
+    import ROOT
+    try:
+        f = ROOT.TFile.Open(filename, 'r')
+    except Exception as e:
+        print(e)
+        sys.exit(1)
+    else:
+        n = f.Get("numEvents").GetVal()
+        print(f"Found {filename} with {n} entries")
+        sys.exit(n==0)
+
+from pathlib import Path
+f = Path(filename)
+if not f.is_file(): sys.exit(1)
+
+size = f.stat().st_size
+
+if size < 1000:  # Almost certainly empty
+    print(f"Found {f} with size {size}!")
+    sys.exit(1)
+
+sys.exit(0)
diff --git a/Tracker/TrackerRecAlgs/NoisyStripFinder/share/checkNoisyStripJobs.py b/Tracker/TrackerRecAlgs/NoisyStripFinder/share/checkNoisyStripJobs.py
new file mode 100755
index 0000000000000000000000000000000000000000..937c7e21d5a39812279ffdfb481025e78738dc20
--- /dev/null
+++ b/Tracker/TrackerRecAlgs/NoisyStripFinder/share/checkNoisyStripJobs.py
@@ -0,0 +1,239 @@
+#!/usr/bin/env python3
+#
+# Sept 2022, E. Torrence
+#
+# Script to check NoisyStrips jobs for problems
+#
+# Usage:
+#  ./checkNoisyStripJobs.py -h
+#
+import sys
+import argparse
+from pathlib import Path
+
+import ROOT
+
+def parse_arguments():
+
+    description="Script to check noisy strip finding jobs\n"
+    parser = argparse.ArgumentParser(description,
+                                     formatter_class=argparse.RawTextHelpFormatter)
+
+    parser.add_argument("runs", nargs='+', help="Specify FASER runs or range")
+
+    parser.add_argument("-v", "--verbose", action="store_true", help="Debugging output")
+    parser.add_argument("--write_missing", action="store_true", help="Write out missing runs to file")
+
+    return parser.parse_args()
+
+# Take a string and turn it into a list of integers
+# Can specify single values, ranges, or comma separated lists of both
+def parseRunList(runlist):
+
+    run_list = []
+
+    # Check if this is a file with run numbers
+    if len(runlist) == 1:
+        path = Path(runlist[0])
+        if path.exists() and path.is_file():
+            print(f"Reading runs from {path}")
+            # Try reading each line as a run number
+            with path.open() as f:
+                for line in f.readlines():
+                    line = line.strip()
+                    if len(line) == 0: continue
+                    if line[0] in ['#', '!']: continue
+                    if not line.isnumeric():
+                        print(f"Error parsing {line}")
+                        continue
+                    run_list.append(int(line))
+            # Done reading file
+            return(run_list)
+        elif '-' in runlist[0]:
+            pass
+        elif ',' in runlist[0]:
+            pass
+        elif not runlist[0].isnumeric():
+            print(f"File {path} doesn't exist!")
+            return run_list
+
+    for string in runlist:
+        tokens = string.split(',')
+
+        for segment in tokens:
+
+            if len(segment) == 0: continue
+
+            if '-' in segment:  # Range of runs
+                start, end = segment.split('-')
+                if not start.isnumeric():
+                    print(f"Found invalid run {start}")
+                    continue
+                if not end.isnumeric():
+                    print(f"Found invalid run {end}")
+                    continue
+                start = int(start)
+                end = int(end)
+                run_list.extend(list(range(int(start), int(end)+1)))
+
+            else:
+                if not segment.isnumeric():
+                    print(f"Found invalid run {segment}")
+                    continue
+                run_list.append(int(segment))
+
+    return(run_list)
+
+class JobChecker:
+
+    def __init__(self, run=None):
+        self.select_run(run)
+
+        self.verbose = True
+        self.check_all = False
+
+        self.check_function_list = []
+        self.check_function_list.append(self.check_directory)
+        self.check_function_list.append(self.check_histograms)
+        self.check_function_list.append(self.check_dbfile)
+
+    def select_run(self, run):
+        self.run_number =
run + if run is None: return + self.run_string = f'{run:06d}' + + def check_run(self, run=None): + # Return true on error + + if run is not None: + self.select_run(run) + + if run is None: + print("No run specified!") + return True + + for func in self.check_function_list: + if func(): return True + + return False + + def check_directory(self): + + directory_path = Path(self.run_string) + if not directory_path.exists(): + print(f"* Directory {self.run_string} not found!") + elif self.verbose: + print(f" => Directory {self.run_string} found") + return( not directory_path.exists() ) + + def check_dbfile(self): + + dbfile_path = Path(f'{self.run_string}/noisy_{self.run_string}.db') + if not dbfile_path.exists(): + print(f"* Database file {dbfile_path} not found!") + elif self.verbose: + print(f" => Database file {dbfile_path} found") + return( not dbfile_path.exists() ) + + def check_histograms(self): + + # First, find the submit files + directory_path = Path(self.run_string) + + submit_list = directory_path.glob('noise*.sub') + + missing = False + + for filepath in submit_list: + + # Ignore DAG + if '.dag.' in str(filepath): continue + + filestem = filepath.stem + hist_file = directory_path / Path(f'{filestem}.root') + if hist_file.exists(): + if self.verbose: print(f" => Found histogram file {hist_file}") + + # Check number of events? + try: + f = ROOT.TFile.Open(str(hist_file), 'r') + except Exception as e: + print(e) + missing = True + else: + n = f.Get("numEvents").GetVal() + if self.verbose: + print(f"{hist_file} found with {n} entries") + if n == 0: + print(f"{hist_file} found with {n} entries") + missing = True + + continue + + # Histogram doesn't exist + missing = True + print(f"* Histogram file {hist_file} missing! ", end="") + + # See if we can figure out why + logfile_path = directory_path / Path(f'{filestem}.log') + if not logfile_path.exists(): + print("=> log file not found") + continue + + import subprocess + if subprocess.call(['/bin/grep', "Killed", f"{logfile_path}"], + stdout=subprocess.DEVNULL): + # True means no match + pass + + else: + # False means match + # See if we can find the time + rc = subprocess.run(['/bin/grep', 'Job finished after', f"{logfile_path}"], + stdout=subprocess.PIPE, + universal_newlines=True) + + if rc.returncode: + # Can't find running time + print("=> job appears to have been killed") + else: + timestr = rc.stdout.replace('Job finished after ', '') + print(f"=> job appears to have been killed after {timestr}") + continue + + # Can't figure out why + print('=> unknown problem') + + return missing + +# Command-line execution +if __name__ == "__main__": + + # Parse the command-line arguments + args = parse_arguments() + + run_list = parseRunList(args.runs) + run_list.sort() + + good_runs = [] + missing_runs = [] + + jc = JobChecker() + jc.verbose = args.verbose + + for runnum in run_list: + + if args.verbose: print(f"\nRun {runnum}") + + if jc.check_run(runnum): + missing_runs.append(runnum) + else: + good_runs.append(runnum) + + print(f"Found {len(good_runs)} good runs and {len(missing_runs)} missing runs") + if args.write_missing: + missing_file="missing_runs.txt" + with open(missing_file, "w") as f: + [f.write(f"{run}\n") for run in missing_runs] + print(f"Wrote {len(missing_runs)} missing runs to {missing_file}") diff --git a/Tracker/TrackerRecAlgs/NoisyStripFinder/share/findFaserRunsByType.py b/Tracker/TrackerRecAlgs/NoisyStripFinder/share/findFaserRunsByType.py new file mode 100755 index 
0000000000000000000000000000000000000000..5a6809ea3556f4bd5d56fc492ae615941b631372
--- /dev/null
+++ b/Tracker/TrackerRecAlgs/NoisyStripFinder/share/findFaserRunsByType.py
@@ -0,0 +1,108 @@
+#!/usr/bin/env python3
+#
+# Sept 2022, E. Torrence
+#
+# Script to find FASER runs of a given type (default: Physics)
+#
+# Usage:
+#  ./findFaserRunsByType.py -h
+#
+import json
+import argparse
+import requests
+
+from pathlib import Path
+
+def parse_arguments():
+
+    description="Script to find runs of a given type in a range\n"
+    parser = argparse.ArgumentParser(description,
+                                     formatter_class=argparse.RawTextHelpFormatter)
+
+    parser.add_argument("runs", nargs='+', help="Specify FASER runs or range")
+
+    parser.add_argument("-v", "--verbose", action="store_true", help="Debugging output")
+    parser.add_argument("-o", "--output", default="findRuns.txt", help="Specify output file")
+    parser.add_argument("-t", "--type", default="Physics", help="Run type to match")
+    return parser.parse_args()
+
+# Take a string and turn it into a list of integers
+# Can specify single values, ranges, or comma separated lists of both
+def parseRunList(runlist):
+
+    run_list = []
+
+    # Check if this is a file with run numbers
+    if len(runlist) == 1:
+        path = Path(runlist[0])
+        if path.exists() and path.is_file():
+            print(f"Reading runs from {path}")
+            # Try reading each line as a run number
+            with path.open() as f:
+                for line in f.readlines():
+                    line = line.strip()
+                    if len(line) == 0: continue
+                    if line[0] in ['#', '!']: continue
+                    if not line.isnumeric():
+                        print(f"Error parsing {line}")
+                        continue
+                    run_list.append(int(line))
+            # Done reading file
+            return(run_list)
+
+    for string in runlist:
+        tokens = string.split(',')
+
+        for segment in tokens:
+
+            if len(segment) == 0: continue
+
+            if '-' in segment:  # Range of runs
+                start, end = segment.split('-')
+                start = int(start)
+                end = int(end)
+                run_list.extend(list(range(int(start), int(end)+1)))
+
+            else:
+                run_list.append(int(segment))
+
+    return(run_list)
+
+# Command-line execution
+if __name__ == "__main__":
+
+    # Parse the command-line arguments
+    args = parse_arguments()
+
+    run_list = parseRunList(args.runs)
+    run_list.sort()
+
+    with open(args.output, "w") as f:
+        f.write(f"# findFaserRunsByType.py")
+        [f.write(f" {run}") for run in args.runs]
+        f.write("\n")
+
+    # faser-runinfo address
+    url = "https://faser-runinfo.app.cern.ch/cgibin/"
+
+    # Cycle through range
+    for run in run_list:
+        query = f"{url}getRunInfo.py?runno={run}"
+        response = requests.get(query)
+
+        if not response.json():
+            if args.verbose:
+                print(f"Couldn't find run {run}")
+
+            continue
+
+        run_type = response.json()['type']
+        if args.verbose:
+            print(f"Run {run} has type {run_type}")
+        if run_type != args.type: continue
+
+        with open(args.output, "a") as f:
+            f.write(f"{run}\n")
+
+    # End of loop over runs
diff --git a/Tracker/TrackerRecAlgs/NoisyStripFinder/share/makeEmptyNoisyStripDB.py b/Tracker/TrackerRecAlgs/NoisyStripFinder/share/makeEmptyNoisyStripDB.py
new file mode 100755
index 0000000000000000000000000000000000000000..aa10f2564913b8ed240920792fe97f53d23dc271
--- /dev/null
+++ b/Tracker/TrackerRecAlgs/NoisyStripFinder/share/makeEmptyNoisyStripDB.py
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+
+from PyCool import cool
+
+dbSvc = cool.DatabaseSvcFactory.databaseService()
+# Data DB name
+connectString = 'sqlite://;schema=noisy_strips.db;dbname=CONDBR3'
+
+print('Creating empty database')
+
+dbSvc.dropDatabase(connectString)
+
+db = dbSvc.createDatabase(connectString)
+
+noisyStripsSpec = cool.RecordSpecification()
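+# Payload schema for /SCT/DAQ/NoisyStrips: one record per (sensor, strip)
+# pair, with the measured occupancy stored as a float.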
+noisyStripsSpec.extend('sensor', cool.StorageType.Int32)
+noisyStripsSpec.extend('strip', cool.StorageType.Int32)
+noisyStripsSpec.extend('occupancy', cool.StorageType.Float)
+
+description = '<timeStamp>run-lumi</timeStamp><addrHeader><address_header clid="1238547719" service_type="71" /></addrHeader><typeName>CondAttrListCollection</typeName>'
+noisyStripsFolderSpec = cool.FolderSpecification(cool.FolderVersioning.SINGLE_VERSION, noisyStripsSpec)
+noisyStripsFolder = db.createFolder('/SCT/DAQ/NoisyStrips', noisyStripsFolderSpec, description, True)
+
+firstValid = cool.ValidityKeyMin
+lastValid = cool.ValidityKeyMax
+
+numNoisyStrips = 0  # This is used as a channel...
+for dictkey in range(0, 192):
+    for bini in range(1):  # Could go to 768, but no need to write too many empty strips
+        noisyStripsRecord = cool.Record(noisyStripsSpec)
+        noisyStripsRecord['sensor'] = int(dictkey)
+        noisyStripsRecord['strip'] = int(bini)
+        noisyStripsRecord['occupancy'] = 0.
+        numNoisyStrips += 1
+        noisyStripsFolder.storeObject(firstValid, lastValid, noisyStripsRecord, numNoisyStrips)
+
+db.closeDatabase()
+
+# Do this again for MC
+connectString = 'sqlite://;schema=noisy_strips.db;dbname=OFLP200'
+
+print('Creating empty MC database')
+
+# Don't drop the DB we just made
+#dbSvc.dropDatabase(connectString)
+
+# Create new DB (different name in same file for MC)
+db = dbSvc.createDatabase(connectString)
+
+noisyStripsFolder = db.createFolder('/SCT/DAQ/NoisyStrips', noisyStripsFolderSpec, description, True)
+
+firstValid = cool.ValidityKeyMin
+lastValid = cool.ValidityKeyMax
+
+numNoisyStrips = 0  # This is used as a channel...
+for dictkey in range(0, 192):
+    for bini in range(1):  # Could go to 768, but no need to write too many empty strips
+        noisyStripsRecord = cool.Record(noisyStripsSpec)
+        noisyStripsRecord['sensor'] = int(dictkey)
+        noisyStripsRecord['strip'] = int(bini)
+        noisyStripsRecord['occupancy'] = 0.
+        numNoisyStrips += 1
+        noisyStripsFolder.storeObject(firstValid, lastValid, noisyStripsRecord, numNoisyStrips)
+
+db.closeDatabase()
+
+print('Database completed')
diff --git a/Tracker/TrackerRecAlgs/NoisyStripFinder/share/makeNoisyStripDB.py b/Tracker/TrackerRecAlgs/NoisyStripFinder/share/makeNoisyStripDB.py
new file mode 100755
index 0000000000000000000000000000000000000000..4d58e4f4df7d7c7be06f6290c9ce16cd09d94661
--- /dev/null
+++ b/Tracker/TrackerRecAlgs/NoisyStripFinder/share/makeNoisyStripDB.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python3
+
+import os
+import sys
+import ROOT
+import argparse
+from PyCool import cool
+from CoolConvUtilities.AtlCoolLib import indirectOpen
+
+parser = argparse.ArgumentParser()
+parser.add_argument("file", nargs="+", help="full path to input file")
+parser.add_argument("-t", "--threshold", type=float, default=0.001, help="add strips with an occupancy larger than this threshold to the database")
+parser.add_argument("--force", "-f", action="store_true", help="Overwrite existing DB")
+parser.add_argument("--permissive", action="store_true", help="Allow some input files to be missing")
+parser.add_argument("--output", "-o", default="noisy_strips.db", help="Specify output DB")
+parser.add_argument("--isMC", action="store_true", help="Write MC DB (default: real data)")
+args = parser.parse_args()
+
+def GetKeyNames(self):
+    return [key.GetName() for key in self.GetListOfKeys()]
+
+ROOT.TFile.GetKeyNames = GetKeyNames
+
+numEvents = 0
+nfiles = 0
+HistDict = {}
+
+ROOT.TH1.AddDirectory(0)  # This is necessary in order to have the histogram data after closing the file
+
+trigger = None
+iovlo = cool.ValidityKeyMax
+iovhi = cool.ValidityKeyMin
+
+# Keys to skip
+skipList = ["numEvents", "trigger", "IOVLoRun", "IOVLoLB", "IOVHiRun", "IOVHiLB"]
+
+for inputfile in args.file:
+    # Check that this exists
+    if not os.path.exists(inputfile):
+        if args.permissive: continue
+        print(f"File {inputfile} not found!")
+        sys.exit(1)
+
+    try:
+        f = ROOT.TFile.Open(inputfile, "r")
+    except Exception as e:
+        print(e)
+        if args.permissive: continue
+        sys.exit(1)
+
+    n = f.Get("numEvents").GetVal()
+    print(f"Found {n} events in {inputfile}")
+    if n == 0: continue
+    numEvents += n
+    lorun = f.Get("IOVLoRun").GetVal()
+    hirun = f.Get("IOVHiRun").GetVal()
+    lo = (lorun << 32)
+    hi = ((hirun+1) << 32) - 1
+    if lo < iovlo: iovlo = lo
+    if hi > iovhi: iovhi = hi
+
+    if trigger is None:
+        trigger = f.Get("trigger").GetVal()
+    else:
+        t = f.Get("trigger").GetVal()
+        if t != trigger:
+            print(f"Trigger mismatch! {t} != {trigger} in {inputfile}")
{t} != {trigger} in {inputfile}") + sys.exit(1) # This shouldn't happen + + for rootkey in f.GetKeyNames(): + + # skip over the root objects TParameters that store the trigger and number of events data + if rootkey in skipList: continue + + if rootkey in HistDict: # if sensor histogram has already been stored, then add to it + HistDict[rootkey].Add(f.Get(rootkey),1.0) + else: # if sensor histogram has not already been stored, then store this histogram + HistDict[rootkey] = f.Get(rootkey).Clone() + + nfiles += 1 + f.Close() + +print(f"Total {nfiles} analyzed with {numEvents} events") +print(f"Trigger mask = 0x{trigger:02x}") +print(f"IOV from {(iovlo >> 32)}/{(iovlo & 0xFFFFFFFF)} to {(iovhi >> 32)}/{(iovhi & 0xFFFFFFFF)}") + +# Write DB + +dbSvc = cool.DatabaseSvcFactory.databaseService() +dbname = "CONDBR3" # Real data +if args.isMC: + dbname="OFLP200" # MC + +connectString = f'sqlite://;schema={args.output};dbname={dbname}' + +print(f"Using connection string {connectString}") + +if os.path.exists(args.output): + if args.force: + print(f"Deleting {args.output} due to --force") + os.remove(args.output) + else: + print(f"File {args.output} exists, use --force to overwrite") + sys.exit(1) + +try: + print('Creating database') + dbSvc.dropDatabase(connectString) + db = dbSvc.createDatabase(connectString) +except Exception as e: + print(e) + sys.exit(1) + +noisyStripsSpec = cool.RecordSpecification() +noisyStripsSpec.extend('sensor', cool.StorageType.Int32) +noisyStripsSpec.extend('strip', cool.StorageType.Int32) +noisyStripsSpec.extend('occupancy', cool.StorageType.Float) + +description = '<timeStamp>run-lumi</timeStamp><addrHeader><address_header clid="1238547719" service_type="71" /></addrHeader><typeName>CondAttrListCollection</typeName>' +noisyStripsFolderSpec = cool.FolderSpecification(cool.FolderVersioning.SINGLE_VERSION, noisyStripsSpec) + +print("Creating new folder") +noisyStripsFolder = db.createFolder('/SCT/DAQ/NoisyStrips', noisyStripsFolderSpec, description, True) + +firstValid = iovlo +lastValid = iovhi + +numNoisyStrips = 0 +for dictkey in HistDict: + HistDict[dictkey].Scale(1.0/float(numEvents)) + for bini in range(768): + if HistDict[dictkey].GetBinContent(bini+1) >= args.threshold: + noisyStripsRecord = cool.Record(noisyStripsSpec) + noisyStripsRecord['sensor'] = int(dictkey) + noisyStripsRecord['strip'] = int(bini) + noisyStripsRecord['occupancy'] = HistDict[dictkey].GetBinContent(bini+1) + noisyStripsFolder.storeObject(firstValid, lastValid, noisyStripsRecord, numNoisyStrips) + numNoisyStrips += 1 +db.closeDatabase() + +print('Database completed') +print(f"Added {numNoisyStrips} strips to database") diff --git a/Tracker/TrackerRecAlgs/NoisyStripFinder/share/mergeNoisyStripDB.py b/Tracker/TrackerRecAlgs/NoisyStripFinder/share/mergeNoisyStripDB.py new file mode 100755 index 0000000000000000000000000000000000000000..121aedcc342604846ef9911559ef8b5efb0818a9 --- /dev/null +++ b/Tracker/TrackerRecAlgs/NoisyStripFinder/share/mergeNoisyStripDB.py @@ -0,0 +1,132 @@ +#!/usr/bin/env python3 +# +# Sept 2022, E. 
+#
+# Script to merge individual run DBs into a master DB
+#
+# Usage:
+#  ./mergeNoisyStripDB.py -h
+#
+import sys
+import argparse
+import subprocess
+
+from pathlib import Path
+
+def parse_arguments():
+
+    description = "Script to merge DBs from individual runs into one DB\n"
+    parser = argparse.ArgumentParser(description=description,
+                                     formatter_class=argparse.RawTextHelpFormatter)
+
+    parser.add_argument("runs", nargs='+', help="Specify FASER runs or range")
+
+    parser.add_argument("-v", "--verbose", action="store_true", help="Debugging output")
+    parser.add_argument("-a", "--append", action="store_true", help="Append (rather than overwrite) existing file")
+    parser.add_argument("-o", "--output", default="noisy_strips.db", help="Specify output DB name")
+    return parser.parse_args()
+
+# Take a string and turn it into a list of integers
+# Can specify single values, ranges, or comma separated lists of both
+def parseRunList(runlist):
+
+    run_list = []
+
+    # Check if this is a file with run numbers
+    if len(runlist) == 1:
+        path = Path(runlist[0])
+        if path.exists() and path.is_file():
+            print(f"Reading runs from {path}")
+            # Try reading each line as a run number
+            with path.open() as f:
+                for line in f.readlines():
+                    line = line.strip()
+                    if len(line) == 0: continue
+                    if line[0] in ['#', '!']: continue
+                    if not line.isnumeric():
+                        print(f"Error parsing {line}")
+                        continue
+                    run_list.append(int(line))
+            # Done reading file
+            return(run_list)
+        elif '-' in runlist[0]:
+            pass # Fall through to the range parser below
+        elif ',' in runlist[0]:
+            pass # Fall through to the list parser below
+        elif not runlist[0].isnumeric():
+            print(f"File {path} doesn't exist!")
+            return run_list
+
+    for string in runlist:
+        tokens = string.split(',')
+
+        for segment in tokens:
+
+            if len(segment) == 0: continue
+
+            if '-' in segment: # Range of runs
+                start, end = segment.split('-')
+                if not start.isnumeric():
+                    print(f"Found invalid run {start}")
+                    continue
+                if not end.isnumeric():
+                    print(f"Found invalid run {end}")
+                    continue
+                start = int(start)
+                end = int(end)
+                run_list.extend(list(range(start, end+1)))
+
+            else:
+                if not segment.isnumeric():
+                    print(f"Found invalid run {segment}")
+                    continue
+                run_list.append(int(segment))
+
+    return(run_list)
+
+
+# Command-line execution
+if __name__ == "__main__":
+
+    # Parse the command-line arguments
+    args = parse_arguments()
+
+    run_list = parseRunList(args.runs)
+    run_list.sort()
+
+    first = True
+
+    for runnum in run_list:
+
+        if args.verbose: print(f"\nRun {runnum}")
+        runstr = f'{runnum:06d}'
+        infile = Path(f'{runstr}/noisy_{runstr}.db')
+        if not infile.is_file():
+            print(f"{runstr}/noisy_{runstr}.db doesn't exist!")
+            continue
+
+        command = ['AtlCoolCopy']
+        command.append(f'sqlite://;schema={runstr}/noisy_{runstr}.db;dbname=CONDBR3')
+        command.append(f'sqlite://;schema={args.output};dbname=CONDBR3')
+        if first:
+            first = False
+            target = Path(args.output)
+            if not target.is_file():
+                print(f"Creating file {args.output}")
+                command.append("-create")
+            elif args.append:
+                print(f"Appending to existing file {args.output}")
+            else:
+                print(f"Deleting existing file {args.output}")
+                target.unlink()
+                command.append("-create")
+
+        command.extend(["-alliov", "-nrls", f"{runnum}", "0"])
+
+        if args.verbose: print(command)
+        rc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+                            universal_newlines=True)
+
+        if args.verbose: print(rc.stdout)
+
diff --git a/Tracker/TrackerRecAlgs/NoisyStripFinder/share/runFaserScript.sh b/Tracker/TrackerRecAlgs/NoisyStripFinder/share/runFaserScript.sh
new file mode 100755
index 0000000000000000000000000000000000000000..97d599b73e1046223cc0c6e938062b1206c6e82c
--- /dev/null
+++ b/Tracker/TrackerRecAlgs/NoisyStripFinder/share/runFaserScript.sh
@@ -0,0 +1,80 @@
+#!/bin/bash
+#
+# Wrapper to run any python script in the athena environment
+# Typically used to set up a script for use in condor
+#
+# runFaserScript.sh --rel <release_directory> script.py arguments...
+#
+function print_usage {
+  echo "Usage: runFaserScript.sh --rel <release_directory> [--log <logfile>] script.py [arguments]"
+  echo "  The first option must be the release directory where asetup is called"
+  echo "  All other options are passed to script.py"
+  echo "  Options: "
+  echo "   -h - print usage"
+  echo "   --rel <release_directory> - specify release directory"
+  echo "   --log <logfile> - redirect script output to logfile"
+  echo "   -- End of options considered by this script"
+}
+#
+release_directory=""
+logfile=""
+while [ -n "$1" ]
+do
+  case "$1" in
+    -h | --help)
+      print_usage
+      exit 0;;
+
+    --rel)
+      release_directory="$2";
+      shift;
+      shift;;
+
+    --log)
+      logfile="$2";
+      shift;
+      shift;;
+
+    --) # Signal that everything else should be executed
+      shift;
+      break;;
+
+    *)
+      # Nothing we recognize, execute everything remaining
+      break;;
+  esac
+done
+
+if [ -z "$release_directory" ]; then
+  echo "Must specify release"
+  print_usage
+  exit 1
+fi
+
+# Redirect to log file if requested
+if [ ! -z "$logfile" ]; then
+  #logfile="post_`date +%m%d-%H%M%S`.log"
+  exec >& "$logfile"
+fi
+#
+# Set up release
+starting_directory=`pwd`
+echo "cd $release_directory"
+cd $release_directory
+#
+# Set up the release
+export ATLAS_LOCAL_ROOT_BASE=/cvmfs/atlas.cern.ch/repo/ATLASLocalRootBase
+# Must pass something or source will pass *this* script's arguments instead
+source ${ATLAS_LOCAL_ROOT_BASE}/user/atlasLocalSetup.sh --
+echo "asetup --input=calypso/asetup.faser Athena,22.0.49"
+asetup --input=calypso/asetup.faser Athena,22.0.49
+echo "source run/setup.sh"
+source run/setup.sh
+#
+# Go back to where we started
+echo "cd $starting_directory"
+cd $starting_directory
+#
+# Now run the command
+echo "$@"
+eval "$@"
diff --git a/Tracker/TrackerRecAlgs/NoisyStripFinder/share/submitNoisyStripJobs.py b/Tracker/TrackerRecAlgs/NoisyStripFinder/share/submitNoisyStripJobs.py
new file mode 100755
index 0000000000000000000000000000000000000000..ed780ab1b21c584180be5c0fab4dd8685a3c0abc
--- /dev/null
+++ b/Tracker/TrackerRecAlgs/NoisyStripFinder/share/submitNoisyStripJobs.py
@@ -0,0 +1,256 @@
+#!/usr/bin/env python3
+#
+# Sept 2022, E. Torrence
+#
+# Script to run noisy strip finder on a given run
+# This creates condor submission scripts and a DAG
+# and submits those to run the jobs
+#
+# Usage:
+#  submitNoisyStripJobs.py -h
+#
+import os
+import sys
+import argparse
+import subprocess
+
+from pathlib import Path
+
+def parse_arguments():
+
+    description = "Script to submit jobs to find noisy strips"
+    parser = argparse.ArgumentParser(description=description,
+                                     formatter_class=argparse.RawTextHelpFormatter)
+
+    parser.add_argument("runs", nargs='+', help="Specify FASER runs")
+
+    parser.add_argument("--per_job", type=int, default=25, help="Specify maximum files per job")
+    parser.add_argument("--release", default='.', help="Specify path to release directory")
+    parser.add_argument("--nosubmit", action="store_true", help="Don't submit jobs")
+    parser.add_argument("--nocleanup", action="store_true", help="Don't cleanup output directory on completion")
+    parser.add_argument("--queue", default="longlunch", help="Specify queue (longlunch=2h (default), workday=8h)")
+    parser.add_argument("--rawdir", default="/eos/experiment/faser/raw/2022",
+                        help="Specify raw data directory (default: /eos/experiment/faser/raw/2022)")
+
+    return parser.parse_args()
+
+# Take a string and turn it into a list of integers
+# Can specify single values, ranges, or comma separated lists of both
+def parseRunList(runlist):
+
+    run_list = []
+
+    # Check if this is a file with run numbers
+    if len(runlist) == 1:
+        path = Path(runlist[0])
+        if path.exists() and path.is_file():
+            print(f"Reading runs from {path}")
+            # Try reading each line as a run number
+            with path.open() as f:
+                for line in f.readlines():
+                    line = line.strip()
+                    if len(line) == 0: continue
+                    if line[0] in ['#', '!']: continue
+                    if not line.isnumeric():
+                        print(f"Error parsing {line}")
+                        continue
+                    run_list.append(int(line))
+            # Done reading file
+            return(run_list)
+
+    for string in runlist:
+        tokens = string.split(',')
+
+        for segment in tokens:
+
+            if len(segment) == 0: continue
+
+            if '-' in segment: # Range of runs
+                start, end = segment.split('-')
+                start = int(start)
+                end = int(end)
+                run_list.extend(list(range(start, end+1)))
+
+            else:
+                run_list.append(int(segment))
+
+    return(run_list)
+
+# Command-line execution
+if __name__ == "__main__":
+
+    # Parse the command-line arguments
+    args = parse_arguments()
+
+    run_list = parseRunList(args.runs)
+    run_list.sort()
+
+    # Check some things
+    rel_dir = Path(args.release)
+    package_dir = rel_dir / Path("calypso/Tracker/TrackerRecAlgs/NoisyStripFinder")
+
+    # Script to allow python scripts to be run in condor in the FASER environment
+    env_exec = package_dir / Path("share/runFaserScript.sh")
+
+    if not env_exec.exists():
+        print(f"Can't find executable in release directory {args.release}")
+        sys.exit(1)
+
+    print(f"Start processing {len(run_list)} runs")
+
+    for run in run_list:
+        print(f"Working on run {run}")
+
+        runstr = f"{run:06d}"
+
+        # Get file list
+        raw_dir = Path(f'{args.rawdir}/{runstr}')
+        file_list = list(raw_dir.glob("Faser-Physics*.raw"))
+
+        # Now we need to decide what to do
+        nraw = len(file_list)
+        njobs = (nraw-1) // args.per_job + 1
+        if njobs == 1:
+            print(f"{nraw} raw files found, submitting {njobs} job")
+        else:
+            print(f"{nraw} raw files found, submitting {njobs} jobs")
+
+        if njobs == 0: continue
+
+        # Create a directory for this
+        jobdir = Path(runstr)
+        if jobdir.exists():
+            print(f"Directory {jobdir} exists, deleting...")
+            import shutil
+            shutil.rmtree(jobdir.resolve())
+
+        jobdir.mkdir(exist_ok=True)
+        
submit_list = [] + + # Start the DAG file + dagfile = jobdir / Path(f"noise_{runstr}.dag") + with open(dagfile, 'w') as d: + d.write(f"# Auto-generated DAG submission script for {runstr}\n") + + for job in range(njobs): + + jobstr = f"{job:03d}" + if njobs == 1: + jobname = f"noise_{runstr}" + else: + jobname = f"noise_{runstr}_{jobstr}" + subfile = jobdir / Path(jobname+".sub") + + ilo = job * args.per_job + ihi = ilo + args.per_job + job_files = file_list[ilo:ihi] + + # + # Generate a job submission script + print(f"Writing {subfile}") + submit_list.append(jobname) + with open(subfile, "w") as f: + f.write(f"# Auto-generated submission script for {jobname}\n") + # Set the queue workday = 8h, longlunch = 2h might be enough + f.write(f'+JobFlavour = "{args.queue}"\n') + f.write(f"executable = {env_exec.resolve()}\n") + f.write(f"output = {jobdir.resolve()}/{jobname}.out\n") + f.write(f"error = {jobdir.resolve()}/{jobname}.err\n") + f.write(f"log = {jobdir.resolve()}/{jobname}.log\n") + # No newline as we need to add input files + f.write(f"arguments = --rel {rel_dir.resolve()} NoisyStripFinderJob.py --out {jobname}.root ") + [f.write(f" {filename}") for filename in job_files] + f.write("\n") + f.write("queue") + + # Also add this to our DAG + with open(dagfile, 'a') as d: + d.write(f"JOB {jobname} {subfile.name}\n") + # Also check that the histogram isn't empty + # This can fix some file read errors + d.write(f"SCRIPT POST {jobname} {env_exec.resolve()} --rel {rel_dir.resolve()} checkNoisyStripHist.py {jobname}.root $RETURN\n") + + # Done writing individual jobs + + # Add the merge job to the DAG + with open(dagfile, 'a') as d: + d.write(f"JOB merge_{runstr} merge_{runstr}.sub\n") + d.write("PARENT") + for jobname in submit_list: + d.write(f" {jobname}") + d.write(f" CHILD merge_{runstr}\n") + # Add a retry directive + d.write(f"RETRY ALL_NODES 1\n") + + # Write the merge job submit script + jobname = f"merge_{runstr}" + subfile = jobdir / Path(jobname+".sub") + with open(subfile, "w") as f: + f.write(f"# Auto-generated submission script for {jobname}\n") + f.write(f"output = {jobdir.resolve()}/{jobname}.out\n") + f.write(f"error = {jobdir.resolve()}/{jobname}.err\n") + f.write(f"log = {jobdir.resolve()}/{jobname}.log\n") + + #f.write('+JobFlavour = "workday"\n') # 8 hours, longlunch might be enough + #f.write(f"executable = {hist_exec.resolve()}\n") + f.write(f"executable = {env_exec.resolve()}\n") + + # No newline as we need to add input files + # f.write(f"arguments = --rel {rel_dir.resolve()} --force -o noisy_{runstr}.db") + f.write(f"arguments = --rel {rel_dir.resolve()} makeNoisyStripDB.py --force -o noisy_{runstr}.db") + [f.write(f" {filename}.root") for filename in submit_list] + f.write("\n") + + # Provide files to transfer + f.write(f"transfer_input_files = {submit_list[0]}.root") + [f.write(f",{filename}.root") for filename in submit_list[1:]] + f.write("\n") + f.write(f"should_transfer_files = IF_NEEDED\n") + + # Don't forget queue command + f.write("queue") + + # Do we want a cleanup script? 
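+        # For orientation, the DAG assembled above comes out roughly like this
+        # for a hypothetical two-job run 008000 (illustrative sketch with a
+        # placeholder <rel> path, not verbatim output):
+        #
+        #   JOB noise_008000_000 noise_008000_000.sub
+        #   SCRIPT POST noise_008000_000 runFaserScript.sh --rel <rel> checkNoisyStripHist.py noise_008000_000.root $RETURN
+        #   JOB noise_008000_001 noise_008000_001.sub
+        #   SCRIPT POST noise_008000_001 runFaserScript.sh --rel <rel> checkNoisyStripHist.py noise_008000_001.root $RETURN
+        #   JOB merge_008000 merge_008000.sub
+        #   PARENT noise_008000_000 noise_008000_001 CHILD merge_008000
+        #   RETRY ALL_NODES 1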
+        if not args.nocleanup:
+            with open(dagfile, 'a') as d:
+                d.write(f"SCRIPT POST merge_{runstr} cleanup.sh $RETURN\n")
+
+            cleanup_file = jobdir / Path("cleanup.sh")
+            with open(cleanup_file, 'w') as f:
+                f.write("#!/bin/bash\n")
+                f.write('if [[ $1 != "0" ]]; then\n')
+                f.write('  exit $1\n')
+                f.write('fi\n')
+                # f.write('rm noise_{runstr}.dag.* \n')
+                f.write('rm *.log\n')
+                f.write('rm *.err\n')
+                f.write('rm eventLoopHeartBeat.txt\n')
+                f.write('rm *.cc\n')
+                for job in submit_list:
+                    f.write(f'gzip {job}.out\n')
+                #f.write('gzip merge*.out\n')
+                f.write('exit 0\n')
+
+            # And make it executable
+            import stat
+            cleanup_file.chmod(cleanup_file.stat().st_mode | stat.S_IEXEC)
+
+        if not args.nosubmit:
+            print(f"Submitting noise_{runstr}.dag")
+            startdir = os.getcwd()
+            os.chdir(jobdir)
+
+            # lxplus python3 is 3.6.8, so use old subprocess.run arguments
+            proc = subprocess.run(["/usr/bin/condor_submit_dag", f"noise_{runstr}.dag"],
+                                  stdout=subprocess.PIPE,
+                                  stderr=subprocess.PIPE,
+                                  universal_newlines=True)
+
+            if len(proc.stdout) > 0:
+                print(proc.stdout)
+            if len(proc.stderr) > 0:
+                print(proc.stderr)
+
+            # Change back to our starting point
+            os.chdir(startdir)
diff --git a/Tracker/TrackerRecAlgs/NoisyStripFinder/src/NoisyStripFinder.cxx b/Tracker/TrackerRecAlgs/NoisyStripFinder/src/NoisyStripFinder.cxx
index 71598696beb29fa13256f18193738eaee2b38ee5..c5f3f907852e40b072151c5e53c94ebdf6b3ea1b 100644
--- a/Tracker/TrackerRecAlgs/NoisyStripFinder/src/NoisyStripFinder.cxx
+++ b/Tracker/TrackerRecAlgs/NoisyStripFinder/src/NoisyStripFinder.cxx
@@ -9,6 +9,7 @@
 #include "TrackerRawData/FaserSCT_RDO_Container.h"
 #include "TrackerRawData/FaserSCT_RDORawData.h"
 #include "StoreGate/WriteHandle.h"
+#include "xAODEventInfo/EventInfo.h"
 
 #include <sstream>
 #include <string.h>
@@ -24,6 +25,7 @@ NoisyStripFinder::NoisyStripFinder(const std::string& name, ISvcLocator* pSvcLoc
   AthReentrantAlgorithm(name, pSvcLocator),
   m_idHelper{nullptr}
 {
+  m_iovrange = IOVRange(IOVTime(), IOVTime()); // Make sure this starts undefined
 }
 
 // Initialize method:
@@ -32,6 +34,7 @@ StatusCode NoisyStripFinder::initialize() {
 
   ATH_CHECK(m_rdoContainerKey.initialize());
   ATH_CHECK(m_FaserTriggerData.initialize());
+  ATH_CHECK(m_eventInfo.initialize());
 
   // Get the SCT ID helper
   ATH_CHECK(detStore()->retrieve(m_idHelper, "FaserSCT_ID"));
@@ -43,7 +46,7 @@ StatusCode NoisyStripFinder::initialize() {
 StatusCode NoisyStripFinder::execute(const EventContext& ctx) const
 {
   SG::ReadHandle<xAOD::FaserTriggerData> xaod(m_FaserTriggerData, ctx);
-
+
   int trig_int = xaod->tap();
   int trigmask_int = m_triggerMask.value();
 
@@ -53,8 +56,22 @@ StatusCode NoisyStripFinder::execute(const EventContext& ctx) const {
 
   if (!(xaod->tap() & m_triggerMask.value())) return StatusCode::SUCCESS; // only process events that pass the trigger mask
 
-  ATH_MSG_INFO("trigger passed mask");
-  ++m_numberOfEvents;
+  ATH_MSG_DEBUG("trigger passed mask");
+  ++m_numberOfEvents;
+
+  // Keep track of run
+  SG::ReadHandle<xAOD::EventInfo> xevt(m_eventInfo, ctx);
+  ATH_MSG_DEBUG("Found run number: " << xevt->runNumber());
+  IOVTime iov(xevt->runNumber(), xevt->lumiBlock());
+  ATH_MSG_DEBUG("IOV: " << iov);
+
+  if (!m_iovrange.start().isValid())
+    m_iovrange = IOVRange(iov, iov);
+
+  if (iov > m_iovrange.stop())
+    m_iovrange = IOVRange(m_iovrange.start(), iov);
+
+  ATH_MSG_DEBUG("Range: " << m_iovrange);
 
   // First, we have to retrieve and access the container, not because we want to
   // use it, but in order to generate the proxies for the collections, if they
@@ -99,17 +116,19 @@ 
StatusCode NoisyStripFinder::execute(const EventContext& ctx) const { StatusCode NoisyStripFinder::finalize() { ATH_MSG_INFO("NoisyStripFinder::finalize()"); - ATH_MSG_INFO( m_numberOfEvents << " events processed" ); + ATH_MSG_INFO( m_numberOfEvents << " events found" ); ATH_MSG_INFO( m_numberOfRDOCollection << " RDO collections processed" ); ATH_MSG_INFO( m_numberOfRDO<< " RawData" ); - ATH_MSG_INFO( "Number of sensors found = " << NoisyStrip_histmap.size() << " out of 144" ); + ATH_MSG_INFO( "Number of sensors found = " << NoisyStrip_histmap.size() << " out of 192" ); - for (int ihash = 0; ihash < 144; ++ihash){ // print out the sensors that are missing + for (int ihash = 0; ihash < 192; ++ihash){ // print out the sensors that are missing if ( NoisyStrip_histmap.count(ihash) == 0 ){ ATH_MSG_INFO("missing sensor # " << ihash); } } + ATH_MSG_INFO("IOV range found = " << m_iovrange); + const char *outputname = m_OutputRootName.value().c_str(); TFile* outputfile = new TFile(outputname,"RECREATE"); @@ -119,6 +138,27 @@ StatusCode NoisyStripFinder::finalize() TParameter("numEvents", m_numberOfEvents).Write(); TParameter("trigger", trigmask_int).Write(); + // Write IOV range so we can save this to the DB + if (m_iovrange.start().isValid()) { + long run = m_iovrange.start().run(); + long lb = m_iovrange.start().event(); + TParameter("IOVLoRun", run).Write(); // re_time() + TParameter("IOVLoLB", lb).Write(); + ATH_MSG_INFO("IOV Lo: " << run << "," << lb ); + } + else + ATH_MSG_WARNING("Starting IOV time invalid"); + + if (m_iovrange.stop().isValid()) { + long run = m_iovrange.stop().run(); + long lb = m_iovrange.stop().event(); + TParameter("IOVHiRun", run).Write(); + TParameter("IOVHiLB", lb).Write(); + ATH_MSG_INFO("IOV Hi: " << run << "," << lb ); + } + else + ATH_MSG_WARNING("Ending IOV time invalid"); + std::map<int,TH1D*>::iterator it = NoisyStrip_histmap.begin(); // Iterate over the map using Iterator till end. 
while (it != NoisyStrip_histmap.end()){ @@ -126,7 +166,8 @@ StatusCode NoisyStripFinder::finalize() ATH_MSG_INFO( "---------- hot strip occupancy >= 0.1 for Tracker Sensor hash = "<< it->first <<" ----------" ); int i = 1; while (i <= 768){ - if ( it->second->GetBinContent(i)/(double)m_numberOfEvents >= 0.1 ){ + // This is only for information + if ( it->second->GetBinContent(i)/(double)m_numberOfEvents >= 0.01 ){ ATH_MSG_INFO( "hot strip # = " << i-1 << ", hit occupancy = " << it->second->GetBinContent(i)/(double)m_numberOfEvents ); // print out hot strips } i++; diff --git a/Tracker/TrackerRecAlgs/NoisyStripFinder/src/NoisyStripFinder.h b/Tracker/TrackerRecAlgs/NoisyStripFinder/src/NoisyStripFinder.h index a1d977595765f494217f8a63f044ba40cba26540..24009b93bba234368548e53052c42df7bcb86420 100644 --- a/Tracker/TrackerRecAlgs/NoisyStripFinder/src/NoisyStripFinder.h +++ b/Tracker/TrackerRecAlgs/NoisyStripFinder/src/NoisyStripFinder.h @@ -17,7 +17,8 @@ #include "GaudiKernel/ToolHandle.h" #include "StoreGate/ReadHandleKey.h" - +#include "AthenaKernel/IOVRange.h" +#include "xAODEventInfo/EventInfo.h" //STL #include <map> @@ -72,6 +73,7 @@ class NoisyStripFinder : public AthReentrantAlgorithm { SG::ReadHandleKey<FaserSCT_RDO_Container> m_rdoContainerKey{this, "DataObjectName", "FaserSCT_RDOs", "FaserSCT RDOs"}; SG::ReadHandleKey<xAOD::FaserTriggerData> m_FaserTriggerData{ this, "FaserTriggerDataKey", "FaserTriggerData", "ReadHandleKey for xAOD::FaserTriggerData"}; + SG::ReadHandleKey<xAOD::EventInfo> m_eventInfo{ this, "EventInfoKey", "EventInfo", "ReadHandleKey for xAOD::EventInfo"}; mutable int m_numberOfEvents{0}; mutable std::atomic<int> m_numberOfRDOCollection{0}; @@ -79,6 +81,9 @@ class NoisyStripFinder : public AthReentrantAlgorithm { mutable std::map<int,TH1D*> NoisyStrip_histmap; + // Keep track of first/last IOV seen + // Stored as (run << 32) + lumi block + mutable IOVRange m_iovrange; }; } #endif // NoisyStripFinder_H diff --git a/Tracker/TrackerRecAlgs/NoisyStripFinder/test/NoisyStripFinderDbg.py b/Tracker/TrackerRecAlgs/NoisyStripFinder/test/NoisyStripFinderDbg.py old mode 100644 new mode 100755 index b3c0697cfcef6539190aeeebf26e16b4e0d3b100..f1d2ef1434110fa4a87a4567055e8f3d112f30c0 --- a/Tracker/TrackerRecAlgs/NoisyStripFinder/test/NoisyStripFinderDbg.py +++ b/Tracker/TrackerRecAlgs/NoisyStripFinder/test/NoisyStripFinderDbg.py @@ -13,18 +13,22 @@ from AthenaPoolCnvSvc.PoolWriteConfig import PoolWriteCfg from FaserByteStreamCnvSvc.FaserByteStreamCnvSvcConfig import FaserByteStreamCnvSvcCfg from TrackerPrepRawDataFormation.TrackerPrepRawDataFormationConfig import FaserSCT_ClusterizationCfg from NoisyStripFinder.NoisyStripFinderConfig import NoisyStripFinderCfg +import argparse + +parser = argparse.ArgumentParser() +parser.add_argument("file", nargs="+", help="full path to input file") +parser.add_argument("--nevents", "-n", default=-1, type=int, help="Number of events to process") +args = parser.parse_args() log.setLevel(DEBUG) Configurable.configurableRun3Behavior = True -run = 1792 -ConfigFlags.Input.Files = [f"/home/tboeckh/Documents/data/TI12/Faser-Physics-00{run}-00000.raw"] -ConfigFlags.Output.ESDFileName = f"run00{run}-00000.ESD.pool.root" -ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-01" +ConfigFlags.Input.Files = args.file +ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-02" ConfigFlags.IOVDb.DatabaseInstance = "OFLP200" ConfigFlags.Input.ProjectName = "data21" ConfigFlags.Input.isMC = False -ConfigFlags.GeoModel.FaserVersion = "FASER-01" +ConfigFlags.GeoModel.FaserVersion = 
"FASERNU-03" ConfigFlags.Common.isOnline = False ConfigFlags.GeoModel.Align.Dynamic = False ConfigFlags.Beam.NumberOfCollisions = 0. @@ -33,12 +37,11 @@ ConfigFlags.lock() acc = MainServicesCfg(ConfigFlags) acc.merge(PoolWriteCfg(ConfigFlags)) -acc.merge(FaserByteStreamCnvSvcCfg(ConfigFlags)) +acc.merge(FaserByteStreamCnvSvcCfg(ConfigFlags, OccupancyCut=-1)) acc.merge(FaserSCT_ClusterizationCfg( ConfigFlags, - name="LevelClustering", - DataObjectName="SCT_LEVELMODE_RDOs", - ClusterToolTimingPattern="X1X")) + DataObjectName="SCT_RDOs", + ClusterToolTimingPattern="XXX")) acc.merge(NoisyStripFinderCfg(ConfigFlags)) # Hack to avoid problem with our use of MC databases when isMC = False @@ -48,5 +51,5 @@ replicaSvc.UseCOOLSQLite = True replicaSvc.UseCOOLFrontier = False replicaSvc.UseGeomSQLite = True -sc = acc.run(maxEvents=-1) +sc = acc.run(maxEvents=args.nevents) sys.exit(not sc.isSuccess()) diff --git a/Tracker/TrackerRecAlgs/TrackerPrepRawDataFormation/python/TrackerPrepRawDataFormationConfig.py b/Tracker/TrackerRecAlgs/TrackerPrepRawDataFormation/python/TrackerPrepRawDataFormationConfig.py index 820cc73c5976089bea53e5fe2def1f87967837a9..a94a20eeea879b7e959a6b5da5825671d9291276 100644 --- a/Tracker/TrackerRecAlgs/TrackerPrepRawDataFormation/python/TrackerPrepRawDataFormationConfig.py +++ b/Tracker/TrackerRecAlgs/TrackerPrepRawDataFormation/python/TrackerPrepRawDataFormationConfig.py @@ -8,8 +8,8 @@ PileUpXingFolder=CompFactory.PileUpXingFolder from OutputStreamAthenaPool.OutputStreamConfig import OutputStreamCfg from FaserSCT_GeoModel.FaserSCT_GeoModelConfig import FaserSCT_GeometryCfg - from FaserSiLorentzAngleTool.FaserSCT_LorentzAngleConfig import FaserSCT_LorentzAngleCfg +from FaserSCT_ConditionsTools.FaserSCT_ConditionsSummaryToolConfig import FaserSCT_ConditionsSummaryToolCfg def FaserSCT_ClusterizationCommonCfg(flags, name="FaserSCT_ClusterizationToolCommon", **kwargs): @@ -20,7 +20,13 @@ def FaserSCT_ClusterizationCommonCfg(flags, name="FaserSCT_ClusterizationToolCom trackerClusterMakerTool = Tracker__TrackerClusterMakerTool(name = "TrackerClusterMakerTool") faserSCT_LorentzAngleTool=acc.popToolsAndMerge(FaserSCT_LorentzAngleCfg(flags)) kwargs.setdefault("timeBins", "01X") - clusteringTool = Tracker__FaserSCT_ClusteringTool(name, globalPosAlg = trackerClusterMakerTool, FaserSiLorentzAngleTool=faserSCT_LorentzAngleTool, **kwargs) + + FaserSCT_ConditionsSummaryTool = acc.popToolsAndMerge(FaserSCT_ConditionsSummaryToolCfg(flags)) + clusteringTool = Tracker__FaserSCT_ClusteringTool(name, + globalPosAlg=trackerClusterMakerTool, + FaserSiLorentzAngleTool=faserSCT_LorentzAngleTool, + conditionsTool=FaserSCT_ConditionsSummaryTool, + **kwargs) # clusteringTool.timeBins = "01X" # attach ToolHandles acc.setPrivateTools(clusteringTool) @@ -36,16 +42,20 @@ def FaserSCT_ClusterizationToolCfg(flags, name="FaserSCT_ClusterizationTool", ** def FaserSCT_ClusterizationBasicCfg(flags, **kwargs): """Return ComponentAccumulator for FaserSCT Clusterization""" acc = ComponentAccumulator() - pattern = kwargs.pop("ClusterToolTimingPattern","") + pattern = kwargs.pop("ClusterToolTimingPattern", "") + checkBadChannels = kwargs.pop("checkBadChannels", "False") # print("ClusterToolTimingPattern = ", pattern) if len(pattern) > 0 : - clusterTool = acc.popToolsAndMerge(FaserSCT_ClusterizationToolCfg(flags, timeBins = pattern )) + clusterTool = acc.popToolsAndMerge(FaserSCT_ClusterizationToolCfg(flags, timeBins=pattern, checkBadChannels=checkBadChannels)) else: - clusterTool = 
acc.popToolsAndMerge(FaserSCT_ClusterizationToolCfg(flags)) + clusterTool = acc.popToolsAndMerge(FaserSCT_ClusterizationToolCfg(flags, checkBadChannels=checkBadChannels)) + + FaserSCT_ConditionsSummaryTool = acc.popToolsAndMerge(FaserSCT_ConditionsSummaryToolCfg(flags)) kwargs.setdefault("SCT_ClusteringTool", clusterTool) kwargs.setdefault("DataObjectName", "SCT_RDOs") kwargs.setdefault("ClustersName", "SCT_ClusterContainer") + kwargs.setdefault("conditionsTool", FaserSCT_ConditionsSummaryTool) # kwargs.setdefault("SCT_FlaggedCondData", "SCT_Flags") Tracker__FaserSCT_Clusterization=CompFactory.Tracker.FaserSCT_Clusterization acc.addEventAlgo(Tracker__FaserSCT_Clusterization(**kwargs)) diff --git a/Tracker/TrackerRecAlgs/TrackerPrepRawDataFormation/src/FaserSCT_Clusterization.cxx b/Tracker/TrackerRecAlgs/TrackerPrepRawDataFormation/src/FaserSCT_Clusterization.cxx index 640ba15148970d76ad415f4a6f127f167afe9d62..aa28620812293c71f962602105ee31888baf79de 100644 --- a/Tracker/TrackerRecAlgs/TrackerPrepRawDataFormation/src/FaserSCT_Clusterization.cxx +++ b/Tracker/TrackerRecAlgs/TrackerPrepRawDataFormation/src/FaserSCT_Clusterization.cxx @@ -41,9 +41,9 @@ StatusCode FaserSCT_Clusterization::initialize() { // later and declare everything to be 'good' if it is NULL) if (m_checkBadModules.value()) { ATH_MSG_INFO("Clusterization has been asked to look at bad module info"); - // ATH_CHECK(m_pSummaryTool.retrieve()); + ATH_CHECK(m_pSummaryTool.retrieve()); } else { - // m_pSummaryTool.disable(); + m_pSummaryTool.disable(); } // m_clusterContainerLinkKey = m_clusterContainerKey.key(); diff --git a/Tracker/TrackerRecTools/FaserSiClusterizationTool/CMakeLists.txt b/Tracker/TrackerRecTools/FaserSiClusterizationTool/CMakeLists.txt index 3e22c8a99050cf977d76fc3c7dd21c482188fc41..00988a7c35f348a6bc624b5faa248ea917d8c161 100644 --- a/Tracker/TrackerRecTools/FaserSiClusterizationTool/CMakeLists.txt +++ b/Tracker/TrackerRecTools/FaserSiClusterizationTool/CMakeLists.txt @@ -19,7 +19,7 @@ atlas_add_library( FaserSiClusterizationToolLib LINK_LIBRARIES ${ROOT_LIBRARIES} AthenaBaseComps AthenaKernel GeoPrimitives Identifier EventPrimitives GaudiKernel TrackerSimData TrackerIdentifier TrackerReadoutGeometry TrackerRawData TrackerPrepRawData - TrkParameters CxxUtils + TrkParameters CxxUtils FaserSCT_ConditionsToolsLib PRIVATE_LINK_LIBRARIES ${CLHEP_LIBRARIES} AthenaPoolUtilities FileCatalog FaserDetDescr TrkSurfaces TrkEventPrimitives ) diff --git a/Tracker/TrackerRecTools/FaserSiClusterizationTool/src/FaserSCT_ClusteringTool.cxx b/Tracker/TrackerRecTools/FaserSiClusterizationTool/src/FaserSCT_ClusteringTool.cxx index 2963d98dfdb966c92bf9541052bfe9d0e256b654..d034c3c15d008def9a2bb954674f532e329da74a 100644 --- a/Tracker/TrackerRecTools/FaserSiClusterizationTool/src/FaserSCT_ClusteringTool.cxx +++ b/Tracker/TrackerRecTools/FaserSiClusterizationTool/src/FaserSCT_ClusteringTool.cxx @@ -123,13 +123,13 @@ namespace Tracker ATH_CHECK(m_clusterMaker.retrieve()); -// if (m_checkBadChannels) { -// ATH_MSG_INFO("Clustering has been asked to look at bad channel info"); -// ATH_CHECK(m_conditionsTool.retrieve()); -// } else { -// m_conditionsTool.disable(); -// } - + if (m_checkBadChannels) { + ATH_MSG_INFO("Clustering has been asked to look at bad channel info"); + ATH_CHECK(m_conditionsTool.retrieve()); + } else { + m_conditionsTool.disable(); + } + if (m_doNewClustering and not m_lorentzAngleTool.empty()) { ATH_CHECK(m_lorentzAngleTool.retrieve()); } else { @@ -522,12 +522,12 @@ namespace Tracker unsigned int nBadStrips(0); for 
(unsigned int sn=thisStrip; sn!=thisStrip+nStrips; ++sn) { Identifier stripId = m_useRowInformation ? idHelper.strip_id(waferId,thisRow,sn) : idHelper.strip_id(waferId,sn); -// if (m_conditionsTool->isGood(stripId, InDetConditions::SCT_STRIP)) { + if (m_conditionsTool->isGood(stripId, InDetConditions::SCT_STRIP)) { currentVector.push_back(stripId); -// } else { -// currentVector.push_back(badId); -// ++nBadStrips; -// } + } else { + currentVector.push_back(badId); + ++nBadStrips; + } if (stripCount < 16) { hitsInThirdTimeBin = hitsInThirdTimeBin | (timePattern.test(0) << stripCount); } diff --git a/Tracker/TrackerRecTools/FaserSiClusterizationTool/src/FaserSCT_ClusteringTool.h b/Tracker/TrackerRecTools/FaserSiClusterizationTool/src/FaserSCT_ClusteringTool.h index 94a307a69bb6b79d1dbbe898039febf61f3150c0..0c56c468d21251e1b71b237c0591685244ff7b38 100644 --- a/Tracker/TrackerRecTools/FaserSiClusterizationTool/src/FaserSCT_ClusteringTool.h +++ b/Tracker/TrackerRecTools/FaserSiClusterizationTool/src/FaserSCT_ClusteringTool.h @@ -13,6 +13,7 @@ #include "AthenaBaseComps/AthAlgTool.h" #include "Identifier/Identifier.h" //#include "FaserSCT_ConditionsTools/ISCT_DCSConditionsTool.h" +#include "FaserSCT_ConditionsTools/ISCT_ConditionsSummaryTool.h" #include "InDetCondTools/ISiLorentzAngleTool.h" #include "TrackerReadoutGeometry/SiDetectorElementCollection.h" #include "FaserSiClusterizationTool/IFaserSCT_ClusteringTool.h" @@ -70,6 +71,7 @@ namespace Tracker // ToolHandle<ISCT_DCSConditionsTool> m_conditionsTool{this, "FaserSCT_DCSConditionsTool", "FaserSCT_ConditionsTools", "Tool to retrieve SCT Conditions summary"}; const FaserSCT_ID* m_pHelper; //!< ID helper for FaserSCT + ToolHandle<ISCT_ConditionsSummaryTool> m_conditionsTool{this, "conditionsTool", "FaserSCT_ConditionsTools/FaserSCT_ConditionsSummaryTool", "Tool to retrieve SCT Conditions summary"}; ToolHandle< TrackerClusterMakerTool > m_clusterMaker{this, "globalPosAlg", "TrackerClusterMakerTool"}; ToolHandle<ISiLorentzAngleTool> m_lorentzAngleTool {this, "FaserSiLorentzAngleTool", "FaserSiLorentzAngleTool", "Tool to retreive Lorentz angle of SCT"}; @@ -130,9 +132,8 @@ namespace Tracker // Inline methods /////////////////////////////////////////////////////////////////// - inline bool FaserSCT_ClusteringTool::isBad(const Identifier& ) const { - return false; - // return (not m_conditionsTool->isGood(stripId, InDetConditions::SCT_STRIP)); + inline bool FaserSCT_ClusteringTool::isBad(const Identifier& stripId) const { + return (not m_conditionsTool->isGood(stripId, InDetConditions::SCT_STRIP)); } inline bool FaserSCT_ClusteringTool::testTimeBinsN(const std::bitset<3>& timePattern) const { diff --git a/Tracking/Acts/FaserActsKalmanFilter/python/TI12CKF2Config.py b/Tracking/Acts/FaserActsKalmanFilter/python/TI12CKF2Config.py index 35d2995643014915040e60e0094f1e6f6c7cac54..87101612bd15a3c33c670ef3f0d6ccd5c78f4a5c 100644 --- a/Tracking/Acts/FaserActsKalmanFilter/python/TI12CKF2Config.py +++ b/Tracking/Acts/FaserActsKalmanFilter/python/TI12CKF2Config.py @@ -88,6 +88,16 @@ def TI12CKF2Cfg(flags, **kwargs): kalman_fitter1.RootTrajectorySummaryWriterTool = trajectory_summary_writer_tool1 ckf.KalmanFitterTool1 = kalman_fitter1 + kalman_fitter2 = CompFactory.KalmanFitterTool(name="fitterTool2", **kwargs) + kalman_fitter2.noDiagnostics = kwargs["noDiagnostics"] + kalman_fitter2.ActsLogging = "INFO" + kalman_fitter2.SummaryWriter = True + kalman_fitter2.StatesWriter = True + kalman_fitter2.SeedCovarianceScale = 1 + 
kalman_fitter2.RootTrajectoryStatesWriterTool = trajectory_states_writer_tool2 + kalman_fitter2.RootTrajectorySummaryWriterTool = trajectory_summary_writer_tool2 + ckf.KalmanFitterTool2 = kalman_fitter2 + ckf.TrackSeed = track_seed_tool ckf.ActsLogging = "INFO" ckf.RootTrajectoryStatesWriterTool = trajectory_states_writer_tool diff --git a/Tracking/Acts/FaserActsKalmanFilter/test/TI12CKF2.py b/Tracking/Acts/FaserActsKalmanFilter/test/TI12CKF2.py index 8d440f051ac853181d4ac82d1845b535d17c2cf0..cf63c3e400de073ad330df6498129996863b6c3a 100644 --- a/Tracking/Acts/FaserActsKalmanFilter/test/TI12CKF2.py +++ b/Tracking/Acts/FaserActsKalmanFilter/test/TI12CKF2.py @@ -16,6 +16,7 @@ from TrackerSpacePointFormation.TrackerSpacePointFormationConfig import TrackerS from TrackerSegmentFit.TrackerSegmentFitConfig import SegmentFitAlgCfg from FaserActsKalmanFilter.GhostBustersConfig import GhostBustersCfg from FaserActsKalmanFilter.TI12CKF2Config import TI12CKF2Cfg + import argparse parser = argparse.ArgumentParser() @@ -44,12 +45,12 @@ ConfigFlags.lock() acc = MainServicesCfg(ConfigFlags) acc.merge(FaserGeometryCfg(ConfigFlags)) acc.merge(PoolWriteCfg(ConfigFlags)) -acc.merge(FaserByteStreamCnvSvcCfg(ConfigFlags)) +acc.merge(FaserByteStreamCnvSvcCfg(ConfigFlags, OccupancyCut=0.015)) acc.merge(FaserSCT_ClusterizationCfg(ConfigFlags, DataObjectName="SCT_EDGEMODE_RDOs", ClusterToolTimingPattern="01X")) acc.merge(TrackerSpacePointFinderCfg(ConfigFlags)) acc.merge(SegmentFitAlgCfg(ConfigFlags, SharedHitFraction=0.61, MinClustersPerFit=5, TanThetaXZCut=0.083)) acc.merge(GhostBustersCfg(ConfigFlags)) -acc.merge(TI12CKF2Cfg(ConfigFlags)) +acc.merge(TI12CKF2Cfg(ConfigFlags, noDiagnostics=True)) acc.getEventAlgo("CKF2").OutputLevel = DEBUG # logging.getLogger('forcomps').setLevel(VERBOSE) @@ -68,4 +69,4 @@ replicaSvc.UseCOOLFrontier = False replicaSvc.UseGeomSQLite = True sc = acc.run(maxEvents=args.nevents) -sys.exit(not sc.isSuccess()) \ No newline at end of file +sys.exit(not sc.isSuccess()) diff --git a/Waveform/WaveRecAlgs/python/WaveRecAlgsConfig.py b/Waveform/WaveRecAlgs/python/WaveRecAlgsConfig.py index 414773bf9dc2e9b8140b33d81a80220c4288e4bf..b02a8b7d2b9b22193ff2a7d5e01efa36a89730cc 100644 --- a/Waveform/WaveRecAlgs/python/WaveRecAlgsConfig.py +++ b/Waveform/WaveRecAlgs/python/WaveRecAlgsConfig.py @@ -19,11 +19,13 @@ def WaveformReconstructionCfg(flags): if not flags.Input.isMC: acc.merge(WaveformClockRecCfg(flags, "ClockRecAlg")) - acc.merge(WaveformHitRecCfg(flags, "TriggerWaveformRecAlg", "Trigger")) + if "TB" not in flags.GeoModel.FaserVersion: + acc.merge(WaveformHitRecCfg(flags, "TriggerWaveformRecAlg", "Trigger")) + acc.merge(WaveformHitRecCfg(flags, "VetoNuWaveformRecAlg", "VetoNu")) + acc.merge(WaveformHitRecCfg(flags, "VetoWaveformRecAlg", "Veto")) acc.merge(WaveformHitRecCfg(flags, "PreshowerWaveformRecAlg", "Preshower")) acc.merge(WaveformHitRecCfg(flags, "CaloWaveformRecAlg", "Calo")) - acc.merge(WaveformHitRecCfg(flags, "VetoNuWaveformRecAlg", "VetoNu")) acc.merge(WaveformTimingCfg(flags)) diff --git a/Waveform/WaveformConditions/WaveCondUtils/scripts/makeTimingDB.py b/Waveform/WaveformConditions/WaveCondUtils/scripts/makeTimingDB.py index b3a1c64a23ad23ae70eaa000e3cbd76064bebfcd..57c3390c087b7f41850716d6ec24aa7d7e7044f6 100755 --- a/Waveform/WaveformConditions/WaveCondUtils/scripts/makeTimingDB.py +++ b/Waveform/WaveformConditions/WaveCondUtils/scripts/makeTimingDB.py @@ -5,7 +5,8 @@ # Can test results with # AtlCoolConsole.py "sqlite://;schema=waveform_reco.db;dbname=OFLP200" -filename = 
'waveform_reco.db'
+#filename = 'waveform_reco.db'
+filename = 'ALLP200.db'
 
 # Nominal trigger time in ns
 nominal_data = {
@@ -89,14 +90,21 @@ for run, data in offset_data.items():
 
 from PyCool import cool
+from CoolConvUtilities.AtlCoolLib import indirectOpen
 
 dbSvc = cool.DatabaseSvcFactory.databaseService()
 connectString = f'sqlite://;schema={filename};dbname=CONDBR3'
 
-print('Creating database')
+print('Opening database')
 
-dbSvc.dropDatabase( connectString )
-db = dbSvc.createDatabase( connectString )
+# Try to open an existing DB; if that fails, create a new one
+try:
+    db = indirectOpen(connectString, readOnly=False)
+except Exception as e:
+    print(e)
+    print("Couldn't open existing DB, creating a new one")
+    #dbSvc.dropDatabase( connectString )
+    db = dbSvc.createDatabase( connectString )
 
 # Nominal trigger times
 nominalSpec = cool.RecordSpecification()
@@ -176,8 +184,16 @@ for run, data in offset_data.items():
 
 connectString = f'sqlite://;schema={filename};dbname=OFLP200'
 
-dbSvc.dropDatabase( connectString )
-db = dbSvc.createDatabase( connectString )
+print('Opening database')
+
+# Try to open an existing DB; if that fails, create a new one
+try:
+    db = indirectOpen(connectString, readOnly=False)
+except Exception as e:
+    print(e)
+    print("Couldn't open existing DB, creating a new one")
+    #dbSvc.dropDatabase( connectString )
+    db = dbSvc.createDatabase( connectString )
 
 # Nominal trigger times
 nominalSpec = cool.RecordSpecification()
diff --git a/Waveform/WaveformConditions/WaveformConditionsTools/python/WaveformTimingConfig.py b/Waveform/WaveformConditions/WaveformConditionsTools/python/WaveformTimingConfig.py
index 3b95ed388208d7d4ab51a3d5621ab868d438c433..dec0c3858667fc6dee844f96f82afcf6d5427e47 100644
--- a/Waveform/WaveformConditions/WaveformConditionsTools/python/WaveformTimingConfig.py
+++ b/Waveform/WaveformConditions/WaveformConditionsTools/python/WaveformTimingConfig.py
@@ -21,12 +21,15 @@ def WaveformTimingCfg(flags, **kwargs):
     # tool = kwargs.get("WaveformTimingTool", WaveformTimingTool(flags))
     # Probably need to figure this out!
     dbInstance = kwargs.get("dbInstance", "TRIGGER_OFL")
-    if flags.Input.isMC:
-        dbname = "OFLP200"
-    else:
-        dbname = "CONDBR3"
+    # This is now set up globally, don't need this here
+    #if flags.Input.isMC:
+    #    dbname = "OFLP200"
+    #else:
+    #    dbname = "CONDBR3"
 
-    acc.merge(addFolders(flags, "/WAVE/DAQ/Timing", dbInstance, className="AthenaAttributeList", db=dbname))
-    acc.merge(addFolders(flags, "/WAVE/DAQ/TimingOffset", dbInstance, className="CondAttrListCollection", db=dbname))
+    #acc.merge(addFolders(flags, "/WAVE/DAQ/Timing", dbInstance, className="AthenaAttributeList", db=dbname))
+    #acc.merge(addFolders(flags, "/WAVE/DAQ/TimingOffset", dbInstance, className="CondAttrListCollection", db=dbname))
+    acc.merge(addFolders(flags, "/WAVE/DAQ/Timing", dbInstance, className="AthenaAttributeList"))
+    acc.merge(addFolders(flags, "/WAVE/DAQ/TimingOffset", dbInstance, className="CondAttrListCollection"))
 
     return acc
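
For a quick sanity check of a noisy-strips DB produced by the scripts above, the folder contents can be dumped straight back out of the SQLite file. The following is a minimal sketch reusing the PyCool calls already present in makeNoisyStripDB.py; the connection string and folder name are copied from those scripts, while the browseObjects() loop is generic PyCool usage that should be verified against the local COOL installation:

#!/usr/bin/env python3
# Minimal sketch: dump /SCT/DAQ/NoisyStrips from a DB made by makeNoisyStripDB.py
from PyCool import cool
from CoolConvUtilities.AtlCoolLib import indirectOpen

# Switch dbname to OFLP200 to inspect the MC copy instead
connectString = 'sqlite://;schema=noisy_strips.db;dbname=CONDBR3'
db = indirectOpen(connectString, readOnly=True)
folder = db.getFolder('/SCT/DAQ/NoisyStrips')

nStrips = 0
for obj in folder.browseObjects(cool.ValidityKeyMin, cool.ValidityKeyMax,
                                cool.ChannelSelection.all()):
    payload = obj.payload()
    # Validity keys pack (run << 32) + lumi block, as written by the finder
    print(f"sensor {payload['sensor']:3d} strip {payload['strip']:3d} "
          f"occupancy {payload['occupancy']:.4f} "
          f"runs {obj.since() >> 32} to {obj.until() >> 32}")
    nStrips += 1

print(f"Found {nStrips} noisy strips")
db.closeDatabase()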