diff --git a/Calorimeter/CaloDigiAlgs/python/CaloDigiAlgsConfig.py b/Calorimeter/CaloDigiAlgs/python/CaloDigiAlgsConfig.py index 1b8a0355ebe97a514954cd5282b2bef462a0f0c4..c9ae148779f540c6357b67344f0c03f61ab1cbca 100644 --- a/Calorimeter/CaloDigiAlgs/python/CaloDigiAlgsConfig.py +++ b/Calorimeter/CaloDigiAlgs/python/CaloDigiAlgsConfig.py @@ -33,13 +33,14 @@ def CaloWaveformDigiCfg(flags, name="CaloWaveformDigiAlg", **kwargs): kwargs.setdefault("CaloHitContainerKey", "EcalHits") kwargs.setdefault("WaveformContainerKey", "CaloWaveforms") - kwargs.setdefault("CB_alpha", -0.9) - kwargs.setdefault("CB_n", 10) - kwargs.setdefault("CB_sigma", 4) + kwargs.setdefault("CB_alpha", -0.32) + kwargs.setdefault("CB_n", 1000) + kwargs.setdefault("CB_sigma", 3.67) kwargs.setdefault("CB_mean", 820) # Time in ns - kwargs.setdefault("CB_norm", 4) # Low gain default, use 20 for high gain + # This number is over-ridden in the digitization script, so change it there! + kwargs.setdefault("CB_norm", 5.0) # Low gain default without filters, use x5? for high gain - kwargs.setdefault("base_mean", 15000) + kwargs.setdefault("base_mean", 15650) kwargs.setdefault("base_rms", 3) digiAlg = CompFactory.CaloWaveformDigiAlg(name, **kwargs) diff --git a/Control/CalypsoExample/Digitization/scripts/faserMDC_digi.py b/Control/CalypsoExample/Digitization/scripts/faserMDC_digi.py index 5730e63389b879801c08665c548bc376b726f1d2..1154edd8aad0cd4ac8daadf5559e05ef1cc362b9 100755 --- a/Control/CalypsoExample/Digitization/scripts/faserMDC_digi.py +++ b/Control/CalypsoExample/Digitization/scripts/faserMDC_digi.py @@ -131,9 +131,9 @@ acc.merge(FaserSCT_DigitizationCfg(ConfigFlags)) from CaloDigiAlgs.CaloDigiAlgsConfig import CaloWaveformDigitizationCfg if args.highCaloGain: - calo_norm = 20. + calo_norm = 25. else: - calo_norm = 4. + calo_norm = 5. acc.merge(CaloWaveformDigitizationCfg(ConfigFlags, CB_norm=calo_norm)) from ScintDigiAlgs.ScintDigiAlgsConfig import ScintWaveformDigitizationCfg diff --git a/Control/CalypsoExample/Digitization/scripts/faserMDC_digi_merge.py b/Control/CalypsoExample/Digitization/scripts/faserMDC_digi_merge.py index cef68cb5668923644c36080cc7ed1e9e1d85950f..a421cead6c379b8dd56fc2d80ea86bb9ca8ab342 100755 --- a/Control/CalypsoExample/Digitization/scripts/faserMDC_digi_merge.py +++ b/Control/CalypsoExample/Digitization/scripts/faserMDC_digi_merge.py @@ -193,9 +193,9 @@ acc.merge(FaserSCT_DigitizationCfg(ConfigFlags)) from CaloDigiAlgs.CaloDigiAlgsConfig import CaloWaveformDigitizationCfg if args.highCaloGain: - calo_norm = 20. + calo_norm = 25. else: - calo_norm = 4. + calo_norm = 5. 
acc.merge(CaloWaveformDigitizationCfg(ConfigFlags, CB_norm=calo_norm)) from ScintDigiAlgs.ScintDigiAlgsConfig import ScintWaveformDigitizationCfg diff --git a/Control/CalypsoExample/Digitization/scripts/faser_digi.py b/Control/CalypsoExample/Digitization/scripts/faser_digi.py index d8cb774850b8b46b94288da30887e3146b7b0c9e..72f890a3bd3b7f352f60c66207de15aaa2ec4961 100755 --- a/Control/CalypsoExample/Digitization/scripts/faser_digi.py +++ b/Control/CalypsoExample/Digitization/scripts/faser_digi.py @@ -2,28 +2,32 @@ # # Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration # Run with: -# ./faser_digi.py filepath runtype +# ./faser_digi.py filepath # # filepath - fully qualified path, including url if needed, to the input HITS file # example: "root://eospublic.cern.ch//eos/experiment/faser/sim/GeniePilot/HITS/1/faser.150fbInv.1.001.HITS.pool.root" # -# runtype - MANDATORY flag to specify the data type (TI12OldMC or TI12MC or TestBeamMC). -# Not extracted (yet) from file path for MC data +# Options: +# --geom=runtype - flag to specify the data type (TI12OldMC or TI12MC or TestBeamMC). +# default to TI12MC # import sys +import time import argparse -parser = argparse.ArgumentParser(description="Run FASER reconstruction") +a = time.time() + +parser = argparse.ArgumentParser(description="Run FASER digitization") parser.add_argument("file_path", help="Fully qualified path of the raw input file") -parser.add_argument("run_type", nargs="?", default="", - help="Specify run type (if it can't be parsed from path)") +parser.add_argument("-g", "--geom", default="TI12MC", + help="Specify geometry (default: TI12MC, alt: TestBeamMC)") parser.add_argument("-t", "--tag", default="", - help="Specify tag (to append to output filename)") + help="Specify digi tag (to append to output filename)") parser.add_argument("--highCaloGain", action='store_true', help="Use high gain settings for calo PMTs") -parser.add_argument("-n", "--nevents", type=int, default=-1, +parser.add_argument("-n", "--nevts", type=int, default=-1, help="Specify number of events to process (default: all)") parser.add_argument("-v", "--verbose", action='store_true', help="Turn on DEBUG output") @@ -35,22 +39,11 @@ from pathlib import Path filepath=Path(args.file_path) # runtype has been provided -if len(args.run_type) > 0: - runtype=args.run_type - -# Extract runtype from path -# Should be directory above run -# i.e.: TestBeamData/Run-004150/Faser-Physics-004150-00000.raw" -else: - if True or len(filepath.parts) < 3: - print("Can't determine run type from path - specify on command line ") - sys.exit(-1) - -# runtype = filepath.parts[-3] +runtype= args.geom print(f"Starting digitization of {filepath.name} with type {runtype}") -if args.nevents > 0: - print(f"Reconstructing {args.nevents} events by command-line option") +if args.nevts > 0: + print(f"Reconstructing {args.nevts} events by command-line option") # Start digitization @@ -88,21 +81,26 @@ elif runtype == "TI12MC": ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-02" else: - print("Invalid run type found:", runtype) - print("Specify correct type or update list") + print("Invalid geometry type found:", runtype) + print("Specify correct geometry or update list") sys.exit(-1) - # Must use original input string here, as pathlib mangles double // in path names ConfigFlags.Input.Files = [ args.file_path ] filestem = filepath.stem +# Remove any filetype modifier +if filestem[-5:] == "-HITS": + filestem = filestem[:-5] + if len(args.tag) > 0: - filestem += f"-{args.tag}" + 
print(f"{args.tag} in {filestem}?") + if args.tag in filestem: + print(f"Not adding tag {args.tag} to file {filestem}") + else: + filestem += f"-{args.tag}" -# ConfigFlags.addFlag("Output.xAODFileName", f"{filestem}-xAOD.root") ConfigFlags.Output.RDOFileName = f"{filestem}-RDO.root" - # # Play around with this? # ConfigFlags.Concurrency.NumThreads = 2 @@ -130,67 +128,14 @@ acc.merge(FaserSCT_DigitizationCfg(ConfigFlags)) from CaloDigiAlgs.CaloDigiAlgsConfig import CaloWaveformDigitizationCfg if args.highCaloGain: - calo_norm = 20. + calo_norm = 25. else: - calo_norm = 4. + calo_norm = 5. acc.merge(CaloWaveformDigitizationCfg(ConfigFlags, CB_norm=calo_norm)) from ScintDigiAlgs.ScintDigiAlgsConfig import ScintWaveformDigitizationCfg acc.merge(ScintWaveformDigitizationCfg(ConfigFlags)) -# from WaveRecAlgs.WaveRecAlgsConfig import WaveformReconstructionCfg -# acc.merge(WaveformReconstructionCfg(ConfigFlags)) - -# # Not ready for primetime -# # from CaloRecAlgs.CaloRecAlgsConfig import CalorimeterReconstructionCfg -# # acc.merge(CalorimeterReconstructionCfg(ConfigFlags)) - -# # Tracker clusters -# from TrackerPrepRawDataFormation.TrackerPrepRawDataFormationConfig import FaserSCT_ClusterizationCfg -# acc.merge(FaserSCT_ClusterizationCfg(ConfigFlags)) - -# # SpacePoints -# from TrackerSpacePointFormation.TrackerSpacePointFormationConfig import TrackerSpacePointFinderCfg -# acc.merge(TrackerSpacePointFinderCfg(ConfigFlags)) - -# # Try Dave's fitter -# from TrackerClusterFit.TrackerClusterFitConfig import ClusterFitAlgCfg -# acc.merge(ClusterFitAlgCfg(ConfigFlags)) - -# -# Configure output -# from OutputStreamAthenaPool.OutputStreamConfig import OutputStreamCfg -# itemList = [ "xAOD::EventInfo#*" -# , "xAOD::EventAuxInfo#*" -# , "xAOD::FaserTriggerData#*" -# , "xAOD::FaserTriggerDataAux#*" -# , "FaserSCT_RDO_Container#*" -# # , "Tracker::FaserSCT_ClusterContainer#*" -# # , "FaserSCT_SpacePointContainer#*" -# # , "FaserSCT_SpacePointOverlapCollection#*" -# # , "TrackCollection#*" -# ] -# acc.merge(OutputStreamCfg(ConfigFlags, "xAOD", itemList)) - -# Waveform reconstruction output -# from WaveRecAlgs.WaveRecAlgsConfig import WaveformReconstructionOutputCfg -# acc.merge(WaveformReconstructionOutputCfg(ConfigFlags)) - -# Calorimeter reconstruction output -# from CaloRecAlgs.CaloRecAlgsConfig import CalorimeterReconstructionOutputCfg -# acc.merge(CalorimeterReconstructionOutputCfg(ConfigFlags)) - -# Check what we have -# print( "Writing out xAOD objects:" ) -# print( acc.getEventAlgo("OutputStreamxAOD").ItemList ) - -# Hack to avoid problem with our use of MC databases when isMC = False -# replicaSvc = acc.getService("DBReplicaSvc") -# replicaSvc.COOLSQLiteVetoPattern = "" -# replicaSvc.UseCOOLSQLite = True -# replicaSvc.UseCOOLFrontier = False -# replicaSvc.UseGeomSQLite = True - # Configure verbosity if args.verbose: acc.foreach_component("*").OutputLevel = VERBOSE @@ -204,4 +149,10 @@ acc.foreach_component("*ClassID*").OutputLevel = INFO acc.getService("MessageSvc").Format = "% F%40W%S%7W%R%T %0W%M" # Execute and finish -sys.exit(int(acc.run(maxEvents=args.nevents).isFailure())) +sc = acc.run(maxEvents=args.nevts) + +b = time.time() +from AthenaCommon.Logging import log +log.info(f"Finish execution in {b-a} seconds") + +sys.exit(not sc.isSuccess()) diff --git a/Control/CalypsoExample/Digitization/scripts/faser_digi_merge.py b/Control/CalypsoExample/Digitization/scripts/faser_digi_merge.py new file mode 100755 index 0000000000000000000000000000000000000000..89fe4fb095846cba78e799b272fafd81302b757f --- 
/dev/null +++ b/Control/CalypsoExample/Digitization/scripts/faser_digi_merge.py @@ -0,0 +1,235 @@ +#!/usr/bin/env python +# +# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Run with: +# ./faser_digi_merge.py dirpath +# +# filepath - fully qualified path, to the directory with input HITS file +# example: "/eos/experiment/faser/sim/tb21/particle_gun/000100/rdo/test" +# +# Options: +# --geom=runtype - flag to specify the data type (TI12OldMC or TI12MC or TestBeamMC). +# default to TI12MC +# +import sys +import time +import argparse + +a = time.time() + +parser = argparse.ArgumentParser(description="Run FASER digitization") + +parser.add_argument("dir_path", + help="Fully qualified path of the input file directory") +parser.add_argument("-p", "--partial", action="store_true", + help="Allow partial merge (default: all specified files required)") +parser.add_argument("-g", "--geom", default="TI12MC", + help="Specify geometry (default: TI12MC, alt: TestBeamMC)") +parser.add_argument("-s", "--slice", type=int, default=0, + help="Specify file slice to produce") +parser.add_argument("-f", "--files", type=int, default=5, + help="Specify number of input files to run in one batch") +parser.add_argument("-t", "--tag", default="", + help="Specify digi tag (to append to output filename)") +parser.add_argument("--highCaloGain", action='store_true', + help="Use high gain settings for calo PMTs") +parser.add_argument("-n", "--nevts", type=int, default=-1, + help="Specify number of events to process (default: all)") +parser.add_argument("-v", "--verbose", action='store_true', + help="Turn on DEBUG output") + +args = parser.parse_args() + +from pathlib import Path + +dirpath = Path(args.dir_path) + +# runtype has been provided +runtype=args.geom + +# Does the directory exist? 
+if not (dirpath.exists() and dirpath.is_dir()): + print(f"Problem with directory {args.dir_path}") + sys.exit(1) + +# Create segment list +seglist = list(range(args.slice*args.files, (args.slice+1)*args.files)) + +# Now build file list +filelist = [] +dirlist = list(dirpath.glob('FaserMC-*-HITS.root')) +if len(dirlist) == 0: + print(f"No HITS file found in directory {args.dir_path}") + sys.exit(1) + +for seg in seglist: + # Assume these are in numerical order from 0 + if seg >= len(dirlist): + print(f"Requested file segment {seg} but only {len(dirlist)} files found") + if args.partial: + break + else: + sys.exit(1) # Abort this job + + # Check if segment number exists in hits file name (this is not perfect) + segstr = f"{seg:05d}" + if segstr not in dirlist[seg].name: + print(f"Segment {segstr} not in file {dirlist[seg]}!") + if not args.partial: sys.exit(1) # abort + filelist.append(dirlist[seg]) + +if len(filelist) == 0: + # Asked for range that doesn't exist + print(f"No files found for slice {args.slice} with Nfiles={args.files}") + sys.exit(1) + +# Figure out the file pattern for the output +stem = filelist[0].stem +spl = stem.split('-') +short = spl[1] +run = spl[2] +seglo = int(spl[3]) +# Can be multiple tags +tagstr = '' +for tag in spl[4:]: + if tag == "HITS": break + if len(tagstr) > 0: + tagstr += "-" + tagstr += tag + +# Also find the largest file number +stem = filelist[-1].stem +spl = stem.split('-') +seghi = int(spl[3]) + +# Build output filename +if seglo == 0 and (seghi+1) == len(dirlist): # Full run + outfile = f"FaserMC-{short}-{run}" +elif seglo == seghi: # Single segment + outfile = f"FaserMC-{short}-{run}-{seglo:05}" +else: + outfile = f"FaserMC-{short}-{run}-{seglo:05}-{seghi:05}" + +# Add existing tag +if len(tagstr) > 0: + outfile += f"-{tagstr}" + +# Was a tag requested? +if len(args.tag) > 0: + if args.tag in tagstr: + print(f"Not adding tag {args.tag} to file {filelist[0]}") + else: + outfile += f"-{args.tag}" + +# Finish output file +outfile += "-RDO.root" + +print(f"Found files from {seglo} to {seghi}") +print(f"Starting digitization of outfile {outfile} with type {runtype}") +if args.nevts > 0: + print(f"Reconstructing {args.nevts} events by command-line option") + +# Start digitization + +from AthenaConfiguration.ComponentAccumulator import ComponentAccumulator +from AthenaConfiguration.ComponentFactory import CompFactory +from AthenaCommon.Constants import VERBOSE, INFO + +from AthenaCommon.Configurable import Configurable +from CalypsoConfiguration.AllConfigFlags import ConfigFlags + +Configurable.configurableRun3Behavior = True + +# Flags for this job +ConfigFlags.Input.isMC = True # Needed to bypass autoconfig +ConfigFlags.IOVDb.DatabaseInstance = "OFLP200" # Use MC conditions for now + +ConfigFlags.Input.ProjectName = "mc20" +ConfigFlags.GeoModel.Align.Dynamic = False +ConfigFlags.Beam.NumberOfCollisions = 0.
+ConfigFlags.Digitization.TruthOutput = True + +# TI12 old geometry +if runtype == "TI12OldMC": + ConfigFlags.GeoModel.FaserVersion = "FASER-01" + ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-01" + +# Testbeam setup +elif runtype == "TestBeamMC" : + ConfigFlags.GeoModel.FaserVersion = "FASER-TB00" + ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-TB00" + +# New TI12 geometry (ugh) +elif runtype == "TI12MC": + ConfigFlags.GeoModel.FaserVersion = "FASERNU-03" + ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-02" + +else: + print("Invalid run type found:", runtype) + print("Specify correct type or update list") + sys.exit(-1) + + +# Try just passing the filelist +if args.dir_path[:22] == '/eos/experiment/faser/': + ConfigFlags.Input.Files = [f"root://eospublic.cern.ch/{str(file)}" for file in filelist] +else: + ConfigFlags.Input.Files = [str(file) for file in filelist] + +ConfigFlags.Output.RDOFileName = outfile + +# +# Play around with this? +# ConfigFlags.Concurrency.NumThreads = 2 +# ConfigFlags.Concurrency.NumConcurrentEvents = 2 +ConfigFlags.lock() + +# +# Configure components +from CalypsoConfiguration.MainServicesConfig import MainServicesCfg +from AthenaPoolCnvSvc.PoolReadConfig import PoolReadCfg +from AthenaPoolCnvSvc.PoolWriteConfig import PoolWriteCfg + +acc = MainServicesCfg(ConfigFlags) +acc.merge(PoolReadCfg(ConfigFlags)) +acc.merge(PoolWriteCfg(ConfigFlags)) + +# +# Needed, or move to MainServicesCfg? +from FaserGeoModel.FaserGeoModelConfig import FaserGeometryCfg +acc.merge(FaserGeometryCfg(ConfigFlags)) + +# Set up algorithms +from FaserSCT_Digitization.FaserSCT_DigitizationConfigNew import FaserSCT_DigitizationCfg +acc.merge(FaserSCT_DigitizationCfg(ConfigFlags)) + +from CaloDigiAlgs.CaloDigiAlgsConfig import CaloWaveformDigitizationCfg +if args.highCaloGain: + calo_norm = 25. +else: + calo_norm = 5. 
+acc.merge(CaloWaveformDigitizationCfg(ConfigFlags, CB_norm=calo_norm)) + +from ScintDigiAlgs.ScintDigiAlgsConfig import ScintWaveformDigitizationCfg +acc.merge(ScintWaveformDigitizationCfg(ConfigFlags)) + +# Configure verbosity +if args.verbose: + acc.foreach_component("*").OutputLevel = VERBOSE + ConfigFlags.dump() + +else: + acc.foreach_component("*").OutputLevel = INFO + +acc.foreach_component("*ClassID*").OutputLevel = INFO + +acc.getService("MessageSvc").Format = "% F%40W%S%7W%R%T %0W%M" + +# Execute and finish +sc = acc.run(maxEvents=args.nevts) + +b = time.time() +from AthenaCommon.Logging import log +log.info(f"Finish execution in {b-a} seconds") + +sys.exit(not sc.isSuccess()) diff --git a/Control/CalypsoExample/Digitization/scripts/submit_faserMDC_digi.sh b/Control/CalypsoExample/Digitization/scripts/submit_faserMDC_digi.sh index 7b1734797cfabcd3a706d389fc8ac396a6ab6248..b6b0c80c42a44e1d3731bd92e2fa800aa8036316 100755 --- a/Control/CalypsoExample/Digitization/scripts/submit_faserMDC_digi.sh +++ b/Control/CalypsoExample/Digitization/scripts/submit_faserMDC_digi.sh @@ -6,6 +6,8 @@ # # Options: # --highGain - apply high gain settings to the Calorimeter PMTs (for muons) +# --out - specify output location (in EOS) to copy output HITS file +# --log - specify output location (in EOS) for log file # # filepath - full file name (with path) # release_directory - optional path to release install directory (default pwd) @@ -30,6 +32,20 @@ do highgain=1 shift;; # This 'eats' the argument + -l | --log) + logdest="$2"; + shift; + shift;; # Must eat 2 options here + + -o | --out) + outdest="$2"; + shift; + shift;; + + --) # End of options + shift; # Eat this + break;; # And stop parsing + -*) echo "Unknown option $1" shift;; @@ -86,7 +102,8 @@ output_directory="$working_directory/$run_number" mkdir -p "$output_directory" # # This magic redirects everything in this script to our log file -exec >& "$output_directory/$file_stem.log" +logfile="${file_stem}.rdo.log" +exec >& "${output_directory}/${logfile}" echo `date` - $HOSTNAME echo "File: $file_name" echo "Release: $release_directory" @@ -160,3 +177,31 @@ fi # Print out ending time date echo "Job finished after $SECONDS seconds" +# +# Copy output to EOS if desired +export EOS_MGM_URL=root://eospublic.cern.ch +# +if ! [ -z "$outdest" ] +then + ls -l + echo "copy *-RDO.root to $outdest" + mkdir -p $outdest + eos cp *-RDO.root ${outdest}/ || true +fi +# +# Also copy log file +if ! [ -z "$logdest" ] +then + cd .. + ls -l + echo "copy $logfile to $logdest" + mkdir -p $logdest + eos cp $logfile $logdest/$logfile +elif ! [ -z "$outdest" ] +then + cd .. 
+ ls -l + echo "copy $logfile to $outdest" + mkdir -p $outdest + eos cp $logfile $outdest/$logfile +fi diff --git a/Control/CalypsoExample/Digitization/scripts/submit_faserMDC_digi_merge.sh b/Control/CalypsoExample/Digitization/scripts/submit_faserMDC_digi_merge.sh index 7963cda28222ec45e13242e1eaebd96595d62a27..ca2e27fcc6956e1ff180dacc41cf1f9ee2e304bf 100755 --- a/Control/CalypsoExample/Digitization/scripts/submit_faserMDC_digi_merge.sh +++ b/Control/CalypsoExample/Digitization/scripts/submit_faserMDC_digi_merge.sh @@ -6,6 +6,8 @@ # # Options: # --highGain - apply high gain settings to the Calorimeter PMTs (for muons) +# --out - specify output location (in EOS) to copy output HITS file +# --log - specify output location (in EOS) for log file # # dirpath - full directory path to HITS files # slice - ordinal output file number @@ -32,6 +34,20 @@ do highgain=1 shift;; # This 'eats' the argument + -l | --log) + logdest="$2"; + shift; + shift;; # Must eat 2 options here + + -o | --out) + outdest="$2"; + shift; + shift;; + + --) # End of options + shift; # Eat this + break;; # And stop parsing + -*) echo "Unknown option $1" shift;; @@ -107,7 +123,8 @@ mkdir -p "$output_directory" file_stem="$faser-$short-$run_number-RDO-merge-$slice" # # This magic redirects everything in this script to our log file -exec >& "$output_directory/$file_stem.log" +logfile="${file_stem}.rdo.log" +exec >& "$output_directory/$logfile" echo `date` - $HOSTNAME echo "Directory: $dir_path" echo "Slice: $slice" @@ -183,3 +200,31 @@ fi # Print out ending time date echo "Job finished after $SECONDS seconds" +# +# Copy output to EOS if desired +export EOS_MGM_URL=root://eospublic.cern.ch +# +if ! [ -z "$outdest" ] +then + ls -l + echo "copy *-RDO.root to $outdest" + mkdir -p $outdest + eos cp *-RDO.root ${outdest}/ || true +fi +# +# Also copy log file +if ! [ -z "$logdest" ] +then + cd .. + ls -l + echo "copy $logfile to $logdest" + mkdir -p $logdest + eos cp $logfile $logdest/$logfile +elif ! [ -z "$outdest" ] +then + cd .. + ls -l + echo "copy $logfile to $outdest" + mkdir -p $outdest + eos cp $logfile $outdest/$logfile +fi diff --git a/Control/CalypsoExample/Digitization/scripts/submit_faser_digi.sh b/Control/CalypsoExample/Digitization/scripts/submit_faser_digi.sh new file mode 100644 index 0000000000000000000000000000000000000000..76abf8172614a17a1f3290f45896f1c6978b5439 --- /dev/null +++ b/Control/CalypsoExample/Digitization/scripts/submit_faser_digi.sh @@ -0,0 +1,223 @@ +#!/bin/bash +# Used with a condor file to submit to vanilla universe +# +# Usage: +# submit_faser_digi.sh [--highGain] filepath [release_directory] [working_directory] +# +# Options: +# --highGain - apply high gain settings to the Calorimeter PMTs (for muons) +# --geom - geometry setting +# --out - specify output location (in EOS) to copy output HITS file +# --log - specify output location (in EOS) for log file +# +# filepath - full file name (with path) +# release_directory - optional path to release install directory (default pwd) +# working_directory - optional path to output directory location (default pwd) +# +# The release directory must already be set up +# (so an unqualified asetup can set up the release properly) +# +# Script will use git describe to find the release tag. +# If this matches sim/s???? or digi/d???? 
it will be passed to the job +# +#---------------------------------------- +# Keep track of time +SECONDS=0 +# +# Parse command-line options +while [ -n "$1" ] +do + case "$1" in + --highGain) + echo "Applying high gain settings" + highgain=1 + shift;; # This 'eats' the argument + + -g | --geom) + geom="$2"; + shift; + shift;; + + -l | --log) + logdest="$2"; + shift; + shift;; # Must eat 2 options here + + -o | --out) + outdest="$2"; + shift; + shift;; + + --) # End of options + shift; # Eat this + break;; # And stop parsing + + -*) + echo "Unknown option $1" + shift;; + + *) break;; # Not an option, don't shift + esac +done +# +# Parse command-line options +file_path=${1} +release_directory=${2} +working_directory=${3} +# +# Set defaults if arguments aren't provided +if [ -z "$file_path" ] +then + echo "No file specified!" + echo "Usage: submit_faser_digi.sh [--highGain] file [release dir] [output dir]" + exit 1 +fi +# +if [ -z "$release_directory" ] +then + release_directory=`pwd` +fi +# +if [ -z "$working_directory" ] +then + working_directory=`pwd` +fi +# +starting_directory=`pwd` +# +# Now extract the run number and file stem +# +# First, get the filename +file_name=$(basename "$file_path") +# +# Now split based on '.' to get stem +defaultIFS=$IFS +IFS='.' +read file_stem ext <<< "$file_name" +# +# Finally extract the run number +IFS='-' +# Read the split words into an array based on delimiter +read faser short run_number segment <<< "$file_stem" +# +# Set the IFS delimeter back or else echo doesn't work... +IFS=$defaultIFS +# +# Make output directory if needed +output_directory="$working_directory/$run_number" +mkdir -p "$output_directory" +# +# This magic redirects everything in this script to our log file +logfile="${file_stem}.rdo.log" +exec >& "${output_directory}/${logfile}" +echo `date` - $HOSTNAME +echo "File: $file_name" +echo "Geom: $geom" +echo "Release: $release_directory" +echo "Output: $output_directory" +echo "Starting: $starting_directory" +# +# Set up the release (do this automatically)? +export ATLAS_LOCAL_ROOT_BASE=/cvmfs/atlas.cern.ch/repo/ATLASLocalRootBase +source ${ATLAS_LOCAL_ROOT_BASE}/user/atlasLocalSetup.sh +# +# Try automatic +# Always go back to the starting directory in case paths are relative +cd "$starting_directory" +cd "$release_directory" +# asetup +# source build/x8*/setup.sh +# +# Do this by hand +asetup --input=calypso/asetup.faser Athena,22.0.49 +source run/setup.sh +#source build/x86*/setup.sh +# +# +# Try to find a release tag +cd calypso +recotag=`git describe` +if [[ "$recotag" == "reco/r"???? ]]; then + tag=`echo "$recotag" | cut -c 6-11` + echo "Found reco tag: $tag" +fi +if [[ "$recotag" == "digi/d"???? ]]; then + tag=`echo "$recotag" | cut -c 6-11` + echo "Found digi tag: $tag" +fi +if [[ "$recotag" == "sim/s"???? 
]]; then + tag=`echo "$recotag" | cut -c 5-10` + echo "Found sim tag: $tag" +fi +# +# Move to the run directory +cd "$starting_directory" +cd "$output_directory" +# +# Remove any previous directory if it exists +#if [[ -e "$file_stem" ]]; then +# echo "Remove previous directory $file_stem" +# rm -rf "$file_stem" +#fi +# +# Make run directory +if [[ -e "$file_stem" ]]; then + echo "Directory $file_stem already exists" +else + mkdir "$file_stem" +fi +cd "$file_stem" +# +# Run job +# +if [[ -z "$highgain" ]]; then + gainstr="" +else + gainstr="--highCaloGain" +fi +# +if [[ -z "$geom" ]]; then + geomstr="" +else + geomstr="--geom $geom" +fi +# +if [[ -z "$tag" ]]; then + tagstr="" +else + tagstr="--tag=$tag" +fi +# +faser_digi.py $geomstr $gainstr $tagstr "$file_path" +# +# Print out ending time +date +echo "Job finished after $SECONDS seconds" +# +# Copy output to EOS if desired +export EOS_MGM_URL=root://eospublic.cern.ch +# +if ! [ -z "$outdest" ] +then + ls -l + echo "copy *-RDO.root to $outdest" + mkdir -p $outdest + eos cp *-RDO.root ${outdest}/ || true +fi +# +# Also copy log file +if ! [ -z "$logdest" ] +then + cd .. + ls -l + echo "copy $logfile to $logdest" + mkdir -p $logdest + eos cp $logfile $logdest/$logfile +elif ! [ -z "$outdest" ] +then + cd .. + ls -l + echo "copy $logfile to $outdest" + mkdir -p $outdest + eos cp $logfile $outdest/$logfile +fi diff --git a/Control/CalypsoExample/Digitization/scripts/submit_faser_digi_merge.sh b/Control/CalypsoExample/Digitization/scripts/submit_faser_digi_merge.sh new file mode 100755 index 0000000000000000000000000000000000000000..fd2fe20e2d5f3b5829a6f57dbb83d9a6480ab6b6 --- /dev/null +++ b/Control/CalypsoExample/Digitization/scripts/submit_faser_digi_merge.sh @@ -0,0 +1,245 @@ +#!/bin/bash +# Used with a condor file to submit to vanilla universe +# +# Usage: +# submit_faser_digi_merge.sh [--highGain] dirpath slice nfiles [release_directory] [working_directory] +# +# Options: +# --highGain - apply high gain settings to the Calorimeter PMTs (for muons) +# --geom - geometry setting +# --out - specify output location (in EOS) to copy output HITS file +# --log - specify output location (in EOS) for log file +# +# dirpath - full directory path to HITS files +# slice - ordinal output file number +# nfiles - number of HITS files to process per slice +# release_directory - optional path to release install directory (default pwd) +# working_directory - optional path to output directory location (default pwd) +# +# The release directory must already be set up +# (so an unqualified asetup can set up the release properly) +# +# Script will use git describe to find the release tag. +# If this matches sim/s???? or digi/d???? 
it will be passed to the job +# +#---------------------------------------- +# Keep track of time +SECONDS=0 +# +# Job options strings +gainstr="" +partialstr="" +geomstr="" +# +# Parse command-line options +while [ -n "$1" ] +do + case "$1" in + --highGain) + echo "Applying high gain settings" + gainstr="--highCaloGain" + shift;; # This 'eats' the argument + + --partial) + echo "Allowing partial merge" + partialstr="--partial" + shift;; + + -g | --geom) + geomstr="--geom $2"; + shift; + shift;; + + -l | --log) + logdest="$2"; + shift; + shift;; # Must eat 2 options here + + -o | --out) + outdest="$2"; + shift; + shift;; + + --) # End of options + shift; # Eat this + break;; # And stop parsing + + -*) + echo "Unknown option $1" + shift;; + + *) break;; # Not an option, don't shift + esac +done +# +# Parse command-line options +dir_path=${1} +slice=${2} +nfiles=${3} +release_directory=${4} +working_directory=${5} +# +# Set defaults if arguments aren't provided +if [ -z "$dir_path" ] +then + echo "No directory specified!" + echo "Usage: submit_faser_digi_merge.sh directory slice nfiles [release dir] [output dir]" + exit 1 +fi +# +if [ -z "$slice" ] +then + echo "Slice number not specified!" + echo "Usage: submit_faser_digi_merge.sh directory slice nfiles [release dir] [output dir]" + exit 1 +fi +# +if [ -z "$nfiles" ] +then + echo "Files per slice not specified!" + echo "Usage: submit_faser_digi_merge.sh directory slice nfiles [release dir] [output dir]" + exit 1 +fi +# +if [ -z "$release_directory" ] +then + release_directory=`pwd` +fi +# +if [ -z "$working_directory" ] +then + working_directory=`pwd` +fi +# +starting_directory=`pwd` +# +# Now extract the run number and file stem +# +# First, get an example filename +file_name=`ls -1 $dir_path | head -1` +# +# Now split based on '.' to get stem +defaultIFS=$IFS +IFS='.' +read file_stem ext <<< "$file_name" +# +# Finally extract the run number +IFS='-' +# Read the split words into an array based on delimiter +read faser short run_number segment <<< "$file_stem" +# +# Set the IFS delimeter back or else echo doesn't work... +IFS=$defaultIFS +# +# Make output directory if needed +output_directory="$working_directory/$run_number" +mkdir -p "$output_directory" +# +# Need to make up an output name +file_stem="$faser-$short-$run_number-RDO-merge-$slice" +# +# This magic redirects everything in this script to our log file +logfile="${file_stem}.rdo.log" +exec >& "$output_directory/$logfile" +echo `date` - $HOSTNAME +echo "Directory: $dir_path" +echo "Geom: $geom" +echo "Slice: $slice" +echo "NFiles: $nfiles" +echo "Release: $release_directory" +echo "Output: $output_directory" +echo "Starting: $starting_directory" +echo "job: $file_stem" +# +# Set up the release (do this automatically)? +export ATLAS_LOCAL_ROOT_BASE=/cvmfs/atlas.cern.ch/repo/ATLASLocalRootBase +source ${ATLAS_LOCAL_ROOT_BASE}/user/atlasLocalSetup.sh +# +# Try automatic +# Always go back to the starting directory in case paths are relative +cd "$starting_directory" +cd "$release_directory" +# asetup +# source build/x8*/setup.sh +# +# Do this by hand +asetup --input=calypso/asetup.faser Athena,22.0.49 +# source build/x86*/setup.sh +source run/setup.sh +# +# +# Try to find a release tag +cd calypso +recotag=`git describe` +if [[ "$recotag" == "reco/r"???? ]]; then + tag=`echo "$recotag" | cut -c 6-11` + echo "Found reco tag: $tag" +fi +if [[ "$recotag" == "digi/d"???? ]]; then + tag=`echo "$recotag" | cut -c 6-11` + echo "Found digi tag: $tag" +fi +if [[ "$recotag" == "sim/s"???? 
]]; then + tag=`echo "$recotag" | cut -c 5-10` + echo "Found sim tag: $tag" +fi +# +if [[ -z "$tag" ]]; then + tagstr="" +else + tagstr="--tag=$tag" +fi +# +# Move to the run directory +cd "$starting_directory" +cd "$output_directory" +# +# Remove any previous directory if it exists +#if [[ -e "$file_stem" ]]; then +# echo "Remove previous directory $file_stem" +# rm -rf "$file_stem" +#fi +# +# Make run directory +if [[ -e "$file_stem" ]]; then + echo "Directory $file_stem already exists" +else + mkdir "$file_stem" +fi +cd "$file_stem" +# +# Run job +# +faser_digi_merge.py $partialstr $geomstr $gainstr $tagstr --slice $slice --files $nfiles $dir_path +# +# Print out ending time +date +echo "Job finished after $SECONDS seconds" +# +# Copy output to EOS if desired +export EOS_MGM_URL=root://eospublic.cern.ch +# +if ! [ -z "$outdest" ] +then + ls -l + echo "copy *-RDO.root to $outdest" + mkdir -p $outdest + eos cp *-RDO.root ${outdest}/ || true +fi +# +# Also copy log file +if ! [ -z "$logdest" ] +then + cd .. + ls -l + echo "copy $logfile to $logdest" + mkdir -p $logdest + eos cp $logfile $logdest/$logfile +elif ! [ -z "$outdest" ] +then + cd .. + ls -l + echo "copy $logfile to $outdest" + mkdir -p $outdest + eos cp $logfile $outdest/$logfile +fi diff --git a/Control/CalypsoExample/Generation/data/mdc/FaserMC-MDC_PG_muon_fasernu_higain_logE-101305.json b/Control/CalypsoExample/Generation/data/mdc/FaserMC-MDC_PG_muon_fasernu_higain_logE-101305.json new file mode 100644 index 0000000000000000000000000000000000000000..6ebccb3062205aa73c48b2953d36e2decf307ef4 --- /dev/null +++ b/Control/CalypsoExample/Generation/data/mdc/FaserMC-MDC_PG_muon_fasernu_higain_logE-101305.json @@ -0,0 +1,13 @@ +{ + "file_length": 2000, + "mass": 105.66, + "maxE": 5000.0, + "minE": 10.0, + "pid": [-13, 13], + "radius": -100.0, + "run": 101305, + "sampler": "log", + "segment": 0, + "short": "MDC_PG_muon_fasernu_higain_logE", + "zpos": -3990.0 +} diff --git a/Control/CalypsoExample/Generation/data/mdc/FaserMC-MDC_PG_muon_fasernu_logE-101302.json b/Control/CalypsoExample/Generation/data/mdc/FaserMC-MDC_PG_muon_fasernu_logE-101302.json index 76ba19e1146b9107c3365cc5c5dd867ff26e6c94..1c3d9b606c4ffc0d31974c7d0592cf2bf1d68801 100644 --- a/Control/CalypsoExample/Generation/data/mdc/FaserMC-MDC_PG_muon_fasernu_logE-101302.json +++ b/Control/CalypsoExample/Generation/data/mdc/FaserMC-MDC_PG_muon_fasernu_logE-101302.json @@ -9,5 +9,5 @@ "sampler": "log", "segment": 0, "short": "MDC_PG_muon_fasernu_logE", - "zpos": -4000.0 + "zpos": -3990.0 } diff --git a/Control/CalypsoExample/Generation/data/mdc/FaserMC-MDC_PG_muon_fasernu_logE-101306.json b/Control/CalypsoExample/Generation/data/mdc/FaserMC-MDC_PG_muon_fasernu_logE-101306.json new file mode 100644 index 0000000000000000000000000000000000000000..d6b9e7b58fe233b6f8099bb8ad15f979ecbcd027 --- /dev/null +++ b/Control/CalypsoExample/Generation/data/mdc/FaserMC-MDC_PG_muon_fasernu_logE-101306.json @@ -0,0 +1,14 @@ +{ + "file_length": 5000, + "mass": 105.66, + "maxE": 5000.0, + "minE": 10.0, + "pid": [-13, 13], + "radius": -2.5, + "angle": 0.0006, + "run": 101306, + "sampler": "log", + "segment": 0, + "short": "MDC_PG_muon_fasernu_logE", + "zpos": -3990.0 +} diff --git a/Control/CalypsoExample/Generation/data/mdc/FaserMC-MDC_PG_muon_fasernu_logE-101307.json b/Control/CalypsoExample/Generation/data/mdc/FaserMC-MDC_PG_muon_fasernu_logE-101307.json new file mode 100644 index 0000000000000000000000000000000000000000..db2954f994dd7c74c9fb30b8e9bda28ac3eea966 --- /dev/null +++
b/Control/CalypsoExample/Generation/data/mdc/FaserMC-MDC_PG_muon_fasernu_logE-101307.json @@ -0,0 +1,14 @@ +{ + "file_length": 5000, + "mass": 105.66, + "maxE": 5000.0, + "minE": 10.0, + "pid": [-13, 13], + "radius": -25., + "angle": 0.0006, + "run": 101307, + "sampler": "log", + "segment": 0, + "short": "MDC_PG_muon_fasernu_logE", + "zpos": -3990.0 +} diff --git a/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_100GeV-000200.json b/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_100GeV-000200.json new file mode 100644 index 0000000000000000000000000000000000000000..5296e25aa0b1e77b34d6a5c870aa0b342c446331 --- /dev/null +++ b/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_100GeV-000200.json @@ -0,0 +1,14 @@ +{ + "file_length": 1000, + "geom": "TestBeamMC", + "mass": 0.511, + "maxE": 100.0, + "minE": 100.0, + "pid": [-11, 11], + "radius": -100.0, + "run": 200, + "sampler": "const", + "segment": 0, + "short": "TB_PG_elec_100GeV", + "zpos": -1000.0 +} diff --git a/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_200GeV-000203.json b/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_200GeV-000203.json new file mode 100644 index 0000000000000000000000000000000000000000..d05ca3f6bab5c29e3bee3e792b1fc8274e8749f8 --- /dev/null +++ b/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_200GeV-000203.json @@ -0,0 +1,14 @@ +{ + "file_length": 1000, + "geom": "TestBeamMC", + "mass": 0.511, + "maxE": 200.0, + "minE": 200.0, + "pid": [-11, 11], + "radius": -100.0, + "run": 203, + "sampler": "const", + "segment": 0, + "short": "TB_PG_elec_200GeV", + "zpos": -1000.0 +} diff --git a/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_30GeV-000202.json b/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_30GeV-000202.json new file mode 100644 index 0000000000000000000000000000000000000000..332f05dbadc980e83872766ecff39e05c985f4af --- /dev/null +++ b/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_30GeV-000202.json @@ -0,0 +1,14 @@ +{ + "file_length": 1000, + "geom": "TestBeamMC", + "mass": 0.511, + "maxE": 30.0, + "minE": 30.0, + "pid": [-11, 11], + "radius": -100.0, + "run": 202, + "sampler": "const", + "segment": 0, + "short": "TB_PG_elec_30GeV", + "zpos": -1000.0 +} diff --git a/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_50GeV-000201.json b/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_50GeV-000201.json new file mode 100644 index 0000000000000000000000000000000000000000..6423bea683751eef9fad1a7e07974ba833f590b7 --- /dev/null +++ b/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_elec_50GeV-000201.json @@ -0,0 +1,14 @@ +{ + "file_length": 1000, + "geom": "TestBeamMC", + "mass": 0.511, + "maxE": 50.0, + "minE": 50.0, + "pid": [-11, 11], + "radius": -100.0, + "run": 201, + "sampler": "const", + "segment": 0, + "short": "TB_PG_elec_50GeV", + "zpos": -1000.0 +} diff --git a/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_muon_100GeV-000100.json b/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_muon_100GeV-000100.json new file mode 100644 index 0000000000000000000000000000000000000000..d6c6eee30676d772ab4a9fdd6cee9e46365658cd --- /dev/null +++ b/Control/CalypsoExample/Generation/data/tb21/FaserMC-TB_PG_muon_100GeV-000100.json @@ -0,0 +1,14 @@ +{ + "file_length": 2000, + "geom": "TestBeamMC", + "mass": 105.66, + "maxE": 100.0, + "minE": 100.0, + "pid": [-13, 13], + "radius": -100.0, + "run": 100, + "sampler": "const", + 
"segment": 0, + "short": "TB_PG_muon_100GeV", + "zpos": -1000.0 +} diff --git a/Control/CalypsoExample/Generation/python/faser_parser.py b/Control/CalypsoExample/Generation/python/faser_parser.py new file mode 100644 index 0000000000000000000000000000000000000000..43f98387e7089cfdf258531ec57aeea072b70f53 --- /dev/null +++ b/Control/CalypsoExample/Generation/python/faser_parser.py @@ -0,0 +1,175 @@ +# +# Copyright (C) 2022 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2022 CERN for the benefit of the FASER collaboration +# +# Parser function for particle gun samples +# +def faser_pgparser(): + + import sys + import json + import argparse + + parser = argparse.ArgumentParser(description="Run FASER ParticleGun Simulation") + + parser.add_argument("--conf", action='append', + help="Specify configuration file with default values") + parser.add_argument("--geom", default="TI12MC", + help="Specify geomtery to simulation (default: TI12MC, alt: TestBeamMC)") + + parser.add_argument("--run", default=123456, type=int, + help="Run number to generate") + parser.add_argument("--segment", default=00000, type=int, + help="Segment number to generate") + parser.add_argument("--file_length", default=1000, type=int, + help="Total events per file segement") + + parser.add_argument("--short", default="PG_logE", + help="Short description for filename") + parser.add_argument("--tag", default=None, + help="Generator tag (e.g.: g0000)") + + parser.add_argument("--pid", default=[-13, 13], type=int, nargs='*', + help="Specify PDG ID of particle (note plus/minus different) or list (e.g.: --pid -13 13)") + parser.add_argument("--mass", default=105.66, type=float, + help="Specify particle mass (in MeV)") + parser.add_argument("--radius", default=100., type=float, + help="Specify radius (in mm)") + parser.add_argument("--angle", default=0.005, type=float, + help="Specify angular width (in Rad)") + parser.add_argument("--zpos", default=None, type=float, + help="Specify z position of particles (in mm) (helpful to avoid FASERnu)") + + parser.add_argument("--sampler", default="log", + help="Specify energy sampling (log, lin, const)") + parser.add_argument("--minE", default=10., type=float, + help="Minimum energy in GeV (for log or lin sampler)") + parser.add_argument("--maxE", default=1000., type=float, + help="Maximum energy (or constant) in GeV") + + parser.add_argument("--nevts", default=-1, type=int, + help="Number of events to generate (for debugging)") + parser.add_argument("--dump", action='store_true', + help="Write out full configuration") + parser.add_argument("--noexec", action='store_true', + help="Exit after parsing configuration (no execution)") + + pg_args = parser.parse_args() + + # Get defaults + if pg_args.conf is not None: + for conf_fname in pg_args.conf: + with open(conf_fname, 'r') as f: + parser.set_defaults(**json.load(f)) + + # Reload arguments to override config file with command line + pg_args = parser.parse_args() + + # Print out configuration if requested + if pg_args.dump: + tmp_args = vars(pg_args).copy() + del tmp_args['dump'] # Don't dump the dump value + del tmp_args['conf'] # Don't dump the conf file name either + del tmp_args['nevts'] # Debugging, not part of configuration + del tmp_args['noexec'] # Debugging, not part of configuration + print("Configuration:") + print(json.dumps(tmp_args, indent=4, sort_keys=False)) + + if pg_args.noexec: + sys.exit(0) + + # + # Add some derived quantities + # + + # Create the file name also (could add gentag here) + pg_args.outfile = 
f"FaserMC-{pg_args.short}-{pg_args.run:06}-{pg_args.segment:05}" + + if pg_args.tag: + pg_args.outfile += f"-{pg_args.tag}" + + pg_args.outfile += "-HITS.root" + + return pg_args + +# All done + +# +# Parser function for Foresee samples +# +def faser_fsparser(): + + import sys + import json + import argparse + + parser = argparse.ArgumentParser(description="Run FASER Foresee Simulation") + + parser.add_argument("--conf", action='append', + help="Specify configuration file with default values") + parser.add_argument("--run", default=123456, type=int, + help="Run number to generate") + parser.add_argument("--segment", default=00000, type=int, + help="Segment number to generate") + parser.add_argument("--file_length", default=1000, type=int, + help="Total events per file segement") + + parser.add_argument("--model", help="Model name") + parser.add_argument("--model_path", help="Path to model phase space file") + + parser.add_argument("--short", default="PG_logE", + help="Short description for filename") + parser.add_argument("--tag", default=None, + help="Generator tag (g0000)") + + parser.add_argument("--pid", default=[-13, 13], type=int, nargs=2, + help="Specify PDG ID of daughter particles") + parser.add_argument("--mass", default=105.66, type=float, + help="Specify particle mass (in MeV)") + + parser.add_argument("--nevts", default=-1, type=int, + help="Number of events to generate (for debugging)") + parser.add_argument("--dump", action='store_true', + help="Write out full configuration") + parser.add_argument("--noexec", action='store_true', + help="Exit after parsing configuration (no execution)") + + fs_args = parser.parse_args() + + # Get defaults + if fs_args.conf is not None: + for conf_fname in fs_args.conf: + with open(conf_fname, 'r') as f: + parser.set_defaults(**json.load(f)) + + # Reload arguments to override config file with command line + fs_args = parser.parse_args() + + # Print out configuration if requested + if fs_args.dump: + tmp_args = vars(fs_args).copy() + del tmp_args['dump'] # Don't dump the dump value + del tmp_args['conf'] # Don't dump the conf file name either + del tmp_args['nevts'] # Debugging, not part of configuration + del tmp_args['noexec'] # Debugging, not part of configuration + print("Configuration:") + print(json.dumps(tmp_args, indent=4, sort_keys=False)) + + if fs_args.noexec: + sys.exit(0) + + # + # Add some derived quantities + # + + # Create the file name also (could add gentag here) + fs_args.outfile = f"FaserMC-{fs_args.short}-{fs_args.run:06}-{fs_args.segment:05}" + + if fs_args.tag: + fs_args.outfile += f"-{fs_args.tag}" + + fs_args.outfile += "-HITS.root" + + return fs_args + +# All done diff --git a/Control/CalypsoExample/Generation/scripts/faserMDC_foresee.py b/Control/CalypsoExample/Generation/scripts/faserMDC_foresee.py index b48bbf861ed4bbac6e0a6fb6cc9fd95da5049138..f16b6a40a0413278097824bb7af63b51811eaa9c 100755 --- a/Control/CalypsoExample/Generation/scripts/faserMDC_foresee.py +++ b/Control/CalypsoExample/Generation/scripts/faserMDC_foresee.py @@ -124,7 +124,7 @@ if __name__ == '__main__': # # MDC geometry configuration # - detectors = ['Veto', 'VetoNu', 'Preshower', 'FaserSCT', 'Ecal', 'Trigger', 'Dipole', 'Emulsion'] + detectors = ['Veto', 'VetoNu', 'Preshower', 'FaserSCT', 'Ecal', 'Trigger', 'Dipole', 'Emulsion', 'Trench'] # # Setup detector flags # diff --git a/Control/CalypsoExample/Generation/scripts/faserMDC_particlegun.py b/Control/CalypsoExample/Generation/scripts/faserMDC_particlegun.py index 
12fe153bf0ccc712c8b72992bf27a60260b84bc4..98975a548601209a81b932c7cbbbe03da7184035 100755 --- a/Control/CalypsoExample/Generation/scripts/faserMDC_particlegun.py +++ b/Control/CalypsoExample/Generation/scripts/faserMDC_particlegun.py @@ -135,7 +135,7 @@ if __name__ == '__main__': # # MDC geometry configuration # - detectors = ['Veto', 'VetoNu', 'Preshower', 'FaserSCT', 'Ecal', 'Trigger', 'Dipole', 'Emulsion'] + detectors = ['Veto', 'VetoNu', 'Preshower', 'FaserSCT', 'Ecal', 'Trigger', 'Dipole', 'Emulsion', 'Trench'] # # Setup detector flags # diff --git a/Control/CalypsoExample/Generation/scripts/faser_particlegun.py b/Control/CalypsoExample/Generation/scripts/faser_particlegun.py index ee38005df79c3f6aab97b5c800f3949521110a02..6abdfdd4a8c8ae18547462b68532bd51cc4a9d71 100755 --- a/Control/CalypsoExample/Generation/scripts/faser_particlegun.py +++ b/Control/CalypsoExample/Generation/scripts/faser_particlegun.py @@ -2,27 +2,36 @@ """ Produce particle gun samples Derived from G4FaserAlgConfigNew -This is a general low-level script, -although this could be useful for testing. Usage: -faser_particlegun.py <options> - -Control using command-line options: ---evtMax=1000 ---skipEvt=1000 - -Output.HITSFileName=<name> -Sim.Gun='{"pid" : 11, "z": -1500.}' +faser_particlegun.py --conf=<config_file> Copyright (C) 2002-2021 CERN for the benefit of the ATLAS and FASER collaborations """ if __name__ == '__main__': + import sys import time a = time.time() # +# Parse command-line options +# + from Generation.faser_parser import faser_pgparser + args = faser_pgparser() +# +# Figure out events to run and skip +# + nskipped = args.segment*args.file_length + if args.nevts > 0: + nevents = args.nevts + else: + nevents = args.file_length +# +# Print out what we are doing +# + print(f"Generating {nevents} evnts into file {args.outfile}") +# # Set up logging and config behaviour # from AthenaCommon.Logging import log @@ -34,21 +43,22 @@ if __name__ == '__main__': # Import and set config flags # from CalypsoConfiguration.AllConfigFlags import ConfigFlags - ConfigFlags.Exec.MaxEvents = 10 # can be overridden from command line with --evtMax=<number> - ConfigFlags.Exec.SkipEvents = 0 # can be overridden from command line with --skipEvt=<number> + ConfigFlags.Exec.MaxEvents = nevents + ConfigFlags.Exec.SkipEvents = nskipped from AthenaConfiguration.Enums import ProductionStep ConfigFlags.Common.ProductionStep = ProductionStep.Simulation # # All these must be specified to avoid auto-configuration # - ConfigFlags.Input.RunNumber = [12345] #Isn't updating - todo: investigate + ConfigFlags.Input.RunNumber = [args.run] ConfigFlags.Input.OverrideRunNumber = True - ConfigFlags.Input.LumiBlockNumber = [1] + ConfigFlags.Input.LumiBlockNumber = [(args.segment+1)] ConfigFlags.Input.isMC = True + ConfigFlags.IOVDb.DatabaseInstance = "OFLP200" # Use MC conditions # # Output file name # - ConfigFlags.Output.HITSFileName = "my.HITS.pool.root" # can be overridden from command line with Output.HITSFileName=<name> + ConfigFlags.Output.HITSFileName = args.outfile # # Sim ConfigFlags # @@ -59,46 +69,86 @@ if __name__ == '__main__': ConfigFlags.addFlag("Sim.Gun",{"Generator" : "SingleParticle"}) # Property bag for particle gun keyword:argument pairs ConfigFlags.addFlag("Sim.Beam.xangle", 0) # Potential beam crossing angles ConfigFlags.addFlag("Sim.Beam.yangle", 0) - - ConfigFlags.GeoModel.FaserVersion = "FASERNU-02" # Geometry set-up - ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-02" # Conditions set-up - 
ConfigFlags.addFlag("Input.InitialTimeStamp", 0) # To avoid autoconfig + ConfigFlags.addFlag("Sim.Beam.xshift", 0) # Potential beam shift + ConfigFlags.addFlag("Sim.Beam.yshift", 0) + + if args.geom == "TI12MC": + # 2022 TI12 geometry + ConfigFlags.GeoModel.FaserVersion = "FASERNU-02" # Geometry set-up + ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-02" # Conditions set-up + # TI12 detectors + detectors = ['Veto', 'VetoNu', 'Preshower', 'FaserSCT', 'Ecal', 'Trigger', + 'Dipole', 'Emulsion', 'Trench'] + + elif args.geom == "TestBeamMC": + # Define 2021 test beam geometry + ConfigFlags.GeoModel.FaserVersion = "FASER-TB00" # Geometry set-up + ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-TB00" # Conditions set-up + # Testbeam detectors (trigger layers are actually veto counters) + detectors = ['Veto', 'Preshower', 'FaserSCT', 'Ecal'] + + else: + print(f"Unknown geometry {args.geom}!") + sys.exit(1) + + ConfigFlags.addFlag("Input.InitialTimeStamp", 0) # To avoid autoconfig ConfigFlags.GeoModel.Align.Dynamic = False - # # Preset particle gun parameters # - from math import atan + import ParticleGun as PG from AthenaCommon.SystemOfUnits import GeV, TeV, cm, m from AthenaCommon.PhysicalConstants import pi - # 11 - electron, 13 - muon, 22 - photon - import ParticleGun as PG - ConfigFlags.Sim.Gun = { - "Generator" : "SingleParticle", "pid" : 13, - "energy" : PG.LogSampler(10*GeV, 1*TeV), - "theta" : PG.GaussianSampler(0, atan((10*cm)/(7*m)), oneside = True), - "phi" : [0, 2*pi], "mass" : 0.511, "radius" : -10*cm, #"z": -2.0*m, - "randomSeed" : 12345} - -# -# Command-line overrides -# -# These have the format: Sim.Gun='{"pid" : 11}' -# Filename: Output.HITSFileName="test.muon.001.root" -# Also number of events: --evtMax 100 -# Starting at z = -1.5m (-1500.) will miss the veto (for electrons) - - import sys - ConfigFlags.fillFromArgs(sys.argv[1:]) - + if isinstance(args.pid, list): + # Note args.pid is a list, must make this a set for ParticleGun + pidarg = set(args.pid) + else: + # Just pass a single value + pidarg = args.pid + + print(f"Using pid: {args.pid} => {pidarg}") + + # Create the simgun dictionary + # Negative radius gives uniform sampling + # Positive radius gives Gaussian sampling + sg_dict = { + "Generator" : "SingleParticle", + "pid" : pidarg, "mass" : args.mass, + "theta" : PG.GaussianSampler(0, args.angle, oneside = True), + "phi" : [0, 2*pi], "radius" : args.radius, + "randomSeed" : args.outfile + } + + # -1000 is safely upstream of detector (to be checked) + # Note zpos is in mm! 
+ if args.zpos: + sg_dict["z"] = args.zpos + + # Determine energy sampling + if args.sampler == "lin": + sg_dict["energy"] = PG.UniformSampler(args.minE*GeV, args.maxE*GeV) + elif args.sampler == "log": + sg_dict["energy"] = PG.LogSampler(args.minE*GeV, args.maxE*GeV) + elif args.sampler == "const": + sg_dict["energy"] = PG.ConstSampler(args.maxE*GeV) + else: + print(f"Sampler {args.sampler} not known!") + sys.exit(1) + + # Pass this in one go to ConfigFlags + ConfigFlags.Sim.Gun = sg_dict + + doShiftLOS = (ConfigFlags.Sim.Beam.xangle or ConfigFlags.Sim.Beam.yangle or + ConfigFlags.Sim.Beam.xshift or ConfigFlags.Sim.Beam.yshift) + + if doShiftLOS: + pgConfig = ConfigFlags.Sim.Gun + pgConfig["McEventKey"] = "BeamTruthEvent_ATLASCoord" + ConfigFlags.Sim.Gun = pgConfig # # By being a little clever, we can steer the geometry setup from the command line using GeoModel.FaserVersion # -# MDC configuration -# - detectors = ['Veto', 'Preshower', 'FaserSCT', 'Ecal', 'Trigger', 'Dipole', 'Emulsion'] -# # Setup detector flags # from CalypsoConfiguration.DetectorConfigFlags import setupDetectorsFromList @@ -134,20 +184,22 @@ if __name__ == '__main__': # Shift LOS # - if ConfigFlags.Sim.Beam.xangle or ConfigFlags.Sim.Beam.yangle: - MCEventKey = "BeamTruthEventShifted" + if doShiftLOS: import McParticleEvent.Pythonizations from GeneratorUtils.ShiftLOSConfig import ShiftLOSCfg - cfg.merge(ShiftLOSCfg(ConfigFlags, OutputMCEventKey = MCEventKey, - xcross = ConfigFlags.Sim.Beam.xangle, ycross = ConfigFlags.Sim.Beam.yangle)) - else: - MCEventKey = "BeamTruthEvent" + + cfg.merge(ShiftLOSCfg(ConfigFlags, + xcross = ConfigFlags.Sim.Beam.xangle, + ycross = ConfigFlags.Sim.Beam.yangle, + xshift = ConfigFlags.Sim.Beam.xshift, + yshift = ConfigFlags.Sim.Beam.yshift)) + # # Add the G4FaserAlg # from G4FaserAlg.G4FaserAlgConfigNew import G4FaserAlgCfg - cfg.merge(G4FaserAlgCfg(ConfigFlags, InputTruthCollection = MCEventKey)) + cfg.merge(G4FaserAlgCfg(ConfigFlags)) # # Dump config # diff --git a/Control/CalypsoExample/Generation/scripts/submit_faserMDC_foresee.sh b/Control/CalypsoExample/Generation/scripts/submit_faserMDC_foresee.sh index 6278fddba9c504ae360c0947c445553b455cdccc..94d9ab8b85e2dbf30498662178e599a519641951 100755 --- a/Control/CalypsoExample/Generation/scripts/submit_faserMDC_foresee.sh +++ b/Control/CalypsoExample/Generation/scripts/submit_faserMDC_foresee.sh @@ -4,6 +4,10 @@ # Usage: # submit_faserMDC_foresee.sh config_file segment [release_directory] [working_directory] # +# Options: +# --out - specify output location (in EOS) to copy output HITS file +# --log - specify output location (in EOS) for log file +# # config_file - full file name (with path) # segment - segment number (file segment) # release_directory - optional path to release install directory (default pwd) @@ -19,6 +23,32 @@ # Keep track of time SECONDS=0 # +# Parse options +while [ -n "$1" ] +do + case "$1" in + -l | --log) + logdest="$2"; + shift; + shift;; # Must eat 2 options here + + -o | --out) + outdest="$2"; + shift; + shift;; + + --) # End of options + shift; # Eat this + break;; # And stop parsing + + -*) + echo "Unknown option $1" + shift;; + + *) break;; # Not an option, don't shift + esac +done +# # Parse command-line options config_path=${1} segment=${2} @@ -32,6 +62,12 @@ then echo "Usage: submit_faserMDC_foresee.sh config_file segment [release dir] [output dir]" exit 1 fi +# Check if relative path (only works run interactively) +if ! [[ ${config_path::1} == "/" ]]; then + echo "config_path should be absolute!" 
+ config_path=`pwd`/${1} + echo "Using: $config_path" +fi # if [ -z "$segment" ] then @@ -83,7 +119,8 @@ fi mkdir -p "$output_directory" # # This magic redirects everything in this script to our log file -exec >& "$output_directory/${config_file_stem}-${seg_str}.log" +logfile="${config_file_stem}-${seg_str}.gen.log" +exec >& "$output_directory/${logfile}" echo `date` - $HOSTNAME echo "File: $config_file" echo "Segment: $seg_str" @@ -119,6 +156,14 @@ if [[ "$gentag" == "sim/s"???? ]]; then tag=`echo "$gentag" | cut -c 5-10` echo "Found sim tag: $tag" fi +if [[ "$gentag" == "digi/d"???? ]]; then + tag=`echo "$gentag" | cut -c 6-11` + echo "Found digi tag: $tag" +fi +if [[ "$gentag" == "reco/r"???? ]]; then + tag=`echo "$gentag" | cut -c 6-11` + echo "Found reco tag: $tag" +fi # # Move to the run directory cd "$starting_directory" @@ -148,3 +193,31 @@ fi # Print out ending time date echo "Job finished after $SECONDS seconds" +# +# Copy output to EOS if desired +export EOS_MGM_URL=root://eospublic.cern.ch +# +if ! [ -z "$outdest" ] +then + ls -l + echo "copy *-HITS.root to $outdest" + mkdir -p $outdest + eos cp *-HITS.root ${outdest}/ || true +fi +# +# Also copy log file +if ! [ -z "$logdest" ] +then + cd .. + ls -l + echo "copy $logfile to $logdest" + mkdir -p $logdest + eos cp $logfile $logdest/$logfile +elif ! [ -z "$outdest" ] +then + cd .. + ls -l + echo "copy $logfile to $outdest" + mkdir -p $outdest + eos cp $logfile $outdest/$logfile +fi diff --git a/Control/CalypsoExample/Generation/scripts/submit_faserMDC_particlegun.sh b/Control/CalypsoExample/Generation/scripts/submit_faserMDC_particlegun.sh index c85fa4803a7fe9d2acfd0d3e29beaf26e515afe6..5d704e9a51445d2ae408512c4eb0df83c71a8b39 100755 --- a/Control/CalypsoExample/Generation/scripts/submit_faserMDC_particlegun.sh +++ b/Control/CalypsoExample/Generation/scripts/submit_faserMDC_particlegun.sh @@ -4,11 +4,17 @@ # Usage: # submit_faserMDC_particlegun.sh config_file segment [release_directory] [working_directory] # +# Options: +# --out - specify output location (in EOS) to copy output HITS file +# --log - specify output location (in EOS) for log file +# # config_file - full file name (with path) # segment - segment number (file segment) # release_directory - optional path to release install directory (default pwd) # working_directory - optional path to output directory location (default pwd) # +# Afterwards, the output file will be copied to the directory specified in working_directory +# # The release directory must already be set up # (so an unqualified asetup can set up the release properly) # @@ -19,6 +25,32 @@ # Keep track of time SECONDS=0 # +# Parse options +while [ -n "$1" ] +do + case "$1" in + -l | --log) + logdest="$2"; + shift; + shift;; # Must eat 2 options here + + -o | --out) + outdest="$2"; + shift; + shift;; + + --) # End of options + shift; # Eat this + break;; # And stop parsing + + -*) + echo "Unknown option $1" + shift;; + + *) break;; # Not an option, don't shift + esac +done +# # Parse command-line options config_path=${1} segment=${2} @@ -26,25 +58,27 @@ release_directory=${3} working_directory=${4} # # Set defaults if arguments aren't provided -if [ -z "$config_path" ] -then +if [ -z "$config_path" ]; then echo "No config_path specified!" echo "Usage: submit_faserMDC_particlegun.sh config_file segment [release dir] [output dir]" exit 1 fi +# Check if relative path (only works when run interactively) +if ! [[ ${config_path::1} == "/" ]]; then + echo "config_path should be absolute!"
+ config_path=`pwd`/${1} + echo "Using: $config_path" +fi # -if [ -z "$segment" ] -then +if [ -z "$segment" ]; then segment=0 fi # -if [ -z "$release_directory" ] -then +if [ -z "$release_directory" ]; then release_directory=`pwd` fi # -if [ -z "$working_directory" ] -then +if [ -z "$working_directory" ]; then working_directory=`pwd` fi # @@ -83,7 +117,8 @@ fi mkdir -p "$output_directory" # # This magic redirects everything in this script to our log file -exec >& "$output_directory/${config_file_stem}-${seg_str}.log" +logfile="${config_file_stem}-${seg_str}.gen.log" +exec >& "$output_directory/${logfile}" echo `date` - $HOSTNAME echo "File: $config_file" echo "Segment: $seg_str" @@ -119,6 +154,14 @@ if [[ "$gentag" == "sim/s"???? ]]; then tag=`echo "$gentag" | cut -c 5-10` echo "Found sim tag: $tag" fi +if [[ "$gentag" == "digi/d"???? ]]; then + tag=`echo "$gentag" | cut -c 6-11` + echo "Found digi tag: $tag" +fi +if [[ "$gentag" == "reco/r"???? ]]; then + tag=`echo "$gentag" | cut -c 6-11` + echo "Found reco tag: $tag" +fi # # Move to the run directory cd "$starting_directory" @@ -148,5 +191,31 @@ fi # Print out ending time date echo "Job finished after $SECONDS seconds" +# +# Copy output to EOS if desired +export EOS_MGM_URL=root://eospublic.cern.ch # - +if ! [ -z "$outdest" ] +then + ls -l + echo "copy *-HITS.root to $outdest" + mkdir -p $outdest + eos cp *-HITS.root ${outdest}/ || true +fi +# +# Also copy log file +if ! [ -z "$logdest" ] +then + cd .. + ls -l + echo "copy $logfile to $logdest" + mkdir -p $logdest + eos cp $logfile $logdest/$logfile +elif ! [ -z "$outdest" ] +then + cd .. + ls -l + echo "copy $logfile to $outdest" + mkdir -p $outdest + eos cp $logfile $outdest/$logfile +fi diff --git a/Control/CalypsoExample/Generation/scripts/submit_faser_particlegun.sh b/Control/CalypsoExample/Generation/scripts/submit_faser_particlegun.sh new file mode 100755 index 0000000000000000000000000000000000000000..ec13600f7a94ce8f1958652f83a6e36f72b4563f --- /dev/null +++ b/Control/CalypsoExample/Generation/scripts/submit_faser_particlegun.sh @@ -0,0 +1,221 @@ +#!/bin/bash +# Used with a condor file to submit to vanilla universe +# +# Usage: +# submit_faser_particlegun.sh config_file segment [release_directory] [working_directory] +# +# Options: +# --out - specify output location (in EOS) to copy output HITS file +# --log - specify output location (in EOS) for log file +# +# config_file - full file name (with path) +# segment - segment number (file segment) +# release_directory - optional path to release install directory (default pwd) +# working_directory - optional path to output directory location (default pwd) +# +# Afterwards, the output file will be copied to the directory specified in working_directory +# +# The release directory must already be set up +# (so an unqualified asetup can set up the release properly) +# +# Script will use git describe to find the release tag. +# If this matches gen/g???? or sim/s???? 
it will be passed to the job +# +#---------------------------------------- +# Keep track of time +SECONDS=0 +# +# Parse options +while [ -n "$1" ] +do + case "$1" in + -l | --log) + logdest="$2"; + shift; + shift;; # Must eat 2 options here + + -o | --out) + outdest="$2"; + shift; + shift;; + + --) # End of options + shift; # Eat this + break;; # And stop parsing + + -*) + echo "Unknown option $1" + shift;; + + *) break;; # Not an option, don't shift + esac +done +# +# Parse command-line options +config_path=${1} +segment=${2} +release_directory=${3} +working_directory=${4} +# +# Set defaults if arguments aren't provided +if [ -z "$config_path" ]; then + echo "No config_path specified!" + echo "Usage: submit_faser_particlegun.sh config_file segment [release dir] [output dir]" + exit 1 +fi +# Check if relative path (only works run interactively) +if ! [[ ${config_path::1} == "/" ]]; then + echo "config_path should be absolute!" + config_path=`pwd`/${1} + echo "Using: $config_path" +fi +# +if [ -z "$segment" ]; then + segment=0 +fi +# +if [ -z "$release_directory" ]; then + release_directory=`pwd` +fi +# +if [ -z "$working_directory" ]; then + working_directory=`pwd` +fi +# +# Apply padding to segment number +printf -v seg_str "%05d" $segment +# +starting_directory=`pwd` +# +# Now extract the file stem +# +# First, get the filename +config_file=$(basename "$config_path") +# +# Now split based on '.' to get stem +defaultIFS=$IFS +IFS='.' +read config_file_stem ext <<< "$config_file" +# +# Try to find the run number +IFS='-' +# Read the split words into an array based on delimeter +read faser short run_number <<< "$config_file_stem" +# +# Set the IFS delimeter back or else echo doesn't work... +IFS=$defaultIFS +# +# Check if we found a number, use full config name if not +output_directory="$working_directory/${run_number}" +re='^[0-9]+$' +if ! [[ $run_number =~ $re ]] ; then + # Not a number... + output_directory="$working_directory/${config_file_stem}" +fi +# +# Make output directory if needed +mkdir -p "$output_directory" +# +# This magic redirects everything in this script to our log file +logfile="${config_file_stem}-${seg_str}.gen.log" +exec >& "$output_directory/${logfile}" +echo `date` - $HOSTNAME +echo "File: $config_file" +echo "Segment: $seg_str" +echo "Release: $release_directory" +echo "Output: $output_directory" +echo "Starting: $starting_directory" +# +# Set up the release (do this automatically)? +export ATLAS_LOCAL_ROOT_BASE=/cvmfs/atlas.cern.ch/repo/ATLASLocalRootBase +source ${ATLAS_LOCAL_ROOT_BASE}/user/atlasLocalSetup.sh +# +# Try automatic +# Always go back to the starting directory in case paths are relative +cd "$starting_directory" +cd "$release_directory" +# This doesn't seem to work, as we need the --input argument +#asetup +#source build/x8*/setup.sh +# +# Do this by hand +asetup --input=calypso/asetup.faser Athena,22.0.49 +source build/x86*/setup.sh +# +# +# Try to find a release tag +cd calypso +gentag=`git describe` +if [[ "$gentag" == "gen/g"???? ]]; then + tag=`echo "$gentag" | cut -c 5-10` + echo "Found gen tag: $tag" +fi +if [[ "$gentag" == "sim/s"???? ]]; then + tag=`echo "$gentag" | cut -c 5-10` + echo "Found sim tag: $tag" +fi +if [[ "$gentag" == "digi/d"???? ]]; then + tag=`echo "$gentag" | cut -c 6-11` + echo "Found digi tag: $tag" +fi +if [[ "$gentag" == "reco/r"???? 
]]; then + tag=`echo "$gentag" | cut -c 6-11` + echo "Found reco tag: $tag" +fi +# +# Move to the run directory +cd "$starting_directory" +cd "$output_directory" +# +# Remove any previous directory if it exists +#if [[ -e "$file_stem" ]]; then +# echo "Remove previous directory $file_stem" +# rm -rf "$file_stem" +#fi +# +# Make run directory +if [[ -e "${config_file_stem}-${seg_str}" ]]; then + echo "Directory ${config_file_stem}-${seg_str} already exists" +else + mkdir "${config_file_stem}-${seg_str}" +fi +cd "${config_file_stem}-${seg_str}" +# +# Run job +if [[ -z "$tag" ]]; then + faser_particlegun.py "--conf=$config_path" "--segment=$seg_str" +else + faser_particlegun.py "--conf=$config_path" "--segment=$seg_str" "--tag=$tag" +fi +# +# Print out ending time +date +echo "Job finished after $SECONDS seconds" +# +# Copy output to EOS if desired +export EOS_MGM_URL=root://eospublic.cern.ch +# +if ! [ -z "$outdest" ] +then + ls -l + echo "copy *-HITS.root to $outdest" + mkdir -p $outdest + eos cp *-HITS.root ${outdest}/ || true +fi +# +# Also copy log file +if ! [ -z "$logdest" ] +then + cd .. + ls -l + echo "copy $logfile to $logdest" + mkdir -p $logdest + eos cp $logfile $logdest/$logfile +elif ! [ -z "$outdest" ] +then + cd .. + ls -l + echo "copy $logfile to $outdest" + mkdir -p $outdest + eos cp $logfile $outdest/$logfile +fi diff --git a/Control/CalypsoExample/Reconstruction/CMakeLists.txt b/Control/CalypsoExample/Reconstruction/CMakeLists.txt index b7329daa28e45b663f6ea7b33e8155b004ee2d6e..23af9627f673516d4e78d17a75596770ca6e3b4b 100644 --- a/Control/CalypsoExample/Reconstruction/CMakeLists.txt +++ b/Control/CalypsoExample/Reconstruction/CMakeLists.txt @@ -18,19 +18,23 @@ atlas_install_python_modules( python/*.py ) atlas_install_scripts( scripts/*.sh scripts/*.py ) atlas_add_test( ProdRecoTI12 - SCRIPT scripts/faser_reco.py ${CMAKE_CURRENT_SOURCE_DIR}/../rawdata/Faser-Physics-001920-filtered.raw TI12Data + SCRIPT scripts/faser_reco.py --geom=TI12Data ${CMAKE_CURRENT_SOURCE_DIR}/../rawdata/Faser-Physics-001920-filtered.raw PROPERTIES TIMEOUT 300 ) -# Turn this off until we figure out the CKF behavior on testbeam data atlas_add_test( ProdRecoTestBeam - SCRIPT scripts/faser_reco.py ${CMAKE_CURRENT_SOURCE_DIR}/../RAWDATA/Faser-Physics-003613-filtered.raw TestBeamData + SCRIPT scripts/faser_reco.py --geom=TestBeamData ${CMAKE_CURRENT_SOURCE_DIR}/../RAWDATA/Faser-Physics-003613-filtered.raw PROPERTIES TIMEOUT 300 ) atlas_add_test( ProdRecoPilotTracks - SCRIPT scripts/faser_reco.py ${CMAKE_CURRENT_SOURCE_DIR}/../RAWDATA/Faser-Physics-pilot_tracks-filtered.raw TI12Data + SCRIPT scripts/faser_reco.py --geom=TI12Data ${CMAKE_CURRENT_SOURCE_DIR}/../RAWDATA/Faser-Physics-pilot_tracks-filtered.raw PROPERTIES TIMEOUT 300 ) -atlas_add_test( ProdRecoTI12-2022 - SCRIPT scripts/faser_reco.py ${CMAKE_CURRENT_SOURCE_DIR}/../rawdata/Faser-Physics-006525-filtered.raw TI12Data02 +# Test of TI12Data02 geometry (should auto-select from run number) +atlas_add_test( ProdRecoTI12-02-2022 + SCRIPT scripts/faser_reco.py ${CMAKE_CURRENT_SOURCE_DIR}/../rawdata/Faser-Physics-006525-filtered.raw PROPERTIES TIMEOUT 300 ) +# Test of TI12Data03 geometry (should auto-select from run number) +atlas_add_test( ProdRecoTI12-03-2022 + SCRIPT scripts/faser_reco.py ${CMAKE_CURRENT_SOURCE_DIR}/../rawdata/Faser-Physics-007833-00003-TrigMask08.raw + PROPERTIES TIMEOUT 300 ) diff --git a/Control/CalypsoExample/Reconstruction/scripts/faser_reco.py b/Control/CalypsoExample/Reconstruction/scripts/faser_reco.py index 
2063b729cb1c5a09ac4a2e90c914fc7096a85412..f7ddab003a59a77df721ee1951a440a16537b614 100755 --- a/Control/CalypsoExample/Reconstruction/scripts/faser_reco.py +++ b/Control/CalypsoExample/Reconstruction/scripts/faser_reco.py @@ -2,16 +2,16 @@ # # Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration # Run with: -# ./faser_reco.py filepath [runtype] +# ./faser_reco.py [--geom=runtype] filepath # # filepath - fully qualified path, including url if needed, to the input raw data file # example: "root://hepatl30//atlas/local/torrence/faser/commissioning/TestBeamData/Run-004150/Faser-Physics-004150-00000.raw" # # runtype - optionally specify the data type (TI12Data, TI12Data02, TI12Data03 or TestBeamData). -# In a normal file system location, this will be extracted from the directory name, -# but runtype will override this assignment. -# TI12Data02 is needed for the IFT geometry. Script will auto-detect this if read -# from normal file system location. +# +# Options: +# --isMC - needed to reconstruct MC data +# --testBeam - shortcut to specify testbeam geometry # import sys import time @@ -23,8 +23,8 @@ parser = argparse.ArgumentParser(description="Run FASER reconstruction") parser.add_argument("file_path", help="Fully qualified path of the raw input file") -parser.add_argument("run_type", nargs="?", default="", - help="Specify run type (if it can't be parsed from path)") +parser.add_argument("-g", "--geom", default="", + help="Specify geometry (if it can't be parsed from run number)\n Values: TI12Data03 (2022 TI12)") parser.add_argument("-r", "--reco", default="", help="Specify reco tag (to append to output filename)") parser.add_argument("-n", "--nevents", type=int, default=-1, @@ -33,7 +33,8 @@ parser.add_argument("-v", "--verbose", action='store_true', help="Turn on DEBUG output") parser.add_argument("--isMC", action='store_true', help="Running on digitised MC rather than data") - +parser.add_argument("--testBeam", action='store_true', + help="Set geometry for 2021 test beam") args = parser.parse_args() @@ -42,37 +43,38 @@ from pathlib import Path filepath=Path(args.file_path) # runtype has been provided -if len(args.run_type) > 0: - runtype=args.run_type +if len(args.geom) > 0: + runtype=args.geom + +# Shortcut for testbeam +elif args.testBeam: + print(f"Use 2021 TestBeam configuration") + runtype = "TestBeamData" -# Extract runtype from path -# Should be directory above run -# i.e.: TestBeamData/Run-004150/Faser-Physics-004150-00000.raw" else: - if len(filepath.parts) < 3: - print("Can't determine run type from path - guessing TI12Data!") - print("If this is not correct, specify on command line") - runtype = "TI12Data" - else: - runtype = filepath.parts[-3] + runtype = "TI12Data03" - # Fix TI12 geometry versions as well (needed in production) - # Probably better to do this from configuration in upstream production scripts, + # Try to pick correct geometry from run number + # This won't work for testbeam data, # so lets call this a hack for now - if runtype == "TI12Data": - - runname = filepath.parts[-2] - try: - runnumber = int(runname.split('-')[1]) - except Exception as e: - print(f"Failed to find run number in {filepath}") - print(f"Couldn't parse {runname}") - print(f"Leave runtype as {runtype}!") + runname = filepath.parts[-2] + try: + runnumber = int(runname.split('-')[1]) + except Exception as e: + print(f"Failed to find run number in {filepath}") + print(f"Couldn't parse {runname}") + print(f"Leave runtype as {runtype}!") + else: + if runnumber > 6700: # Not sure if 
this is exact + print(f"Found run number {runnumber}, using TI12 configuration with IFT+faserNu") + runtype = "TI12Data03" + elif runnumber > 5302: # Last TI12 run on Nov. 23, 2021 without IFT + print(f"Found run number {runnumber}, using TI12 configuration with IFT") + runtype = "TI12Data02" else: - if runnumber > 5302: # Last TI12 run on Nov. 23, 2021 without IFT - print(f"Found run number {runnumber}, using TI12 configuration with IFT") - runtype = "TI12Data02" + print(f"Found run number {runnumber}, using original TI12 configuration") + runtype = "TI12Data" print(f"Starting reconstruction of {filepath.name} with type {runtype}") if args.nevents > 0: @@ -91,7 +93,10 @@ Configurable.configurableRun3Behavior = True # Flags for this job ConfigFlags.Input.isMC = args.isMC # Needed to bypass autoconfig -ConfigFlags.IOVDb.DatabaseInstance = "OFLP200" # Use MC conditions for now +if args.isMC: + ConfigFlags.IOVDb.DatabaseInstance = "OFLP200" # Use MC conditions +else: + ConfigFlags.IOVDb.DatabaseInstance = "CONDBR3" # Use data conditions ConfigFlags.Input.ProjectName = "data20" ConfigFlags.GeoModel.Align.Dynamic = False @@ -106,7 +111,7 @@ if runtype == "TI12Data": ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-01" # Testbeam setup -elif runtype == "TestBeamData" or runtype == "TestBeam2021": +elif runtype == "TestBeamData" or runtype == "TestBeamMC": ConfigFlags.GeoModel.FaserVersion = "FASER-TB00" ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-TB00" useCKF = False @@ -124,13 +129,17 @@ elif runtype == "TI12Data03": else: print("Invalid run type found:", runtype) print("Specify correct type or update list") - sys.exit(-1) + sys.exit(1) # Must use original input string here, as pathlib mangles double // in path names ConfigFlags.Input.Files = [ args.file_path ] filestem = filepath.stem +# Remove any filetype modifier +if filestem[-4:] == "-RDO": + filestem = filestem[:-4] + if len(args.reco) > 0: filestem += f"-{args.reco}" @@ -219,8 +228,12 @@ itemList = [ "xAOD::EventInfo#*" ] # if args.isMC: - # Add truth records here? 
- itemList.extend( ["McEventCollection#*"] ) + # Make xAOD versions of truth + from Reconstruction.xAODTruthCnvAlgConfig import xAODTruthCnvAlgCfg + acc.merge(xAODTruthCnvAlgCfg(ConfigFlags)) + + # Add MC information here + itemList.extend( ["McEventCollection#*", "TrackerSimDataCollection#*"] ) acc.merge(OutputStreamCfg(ConfigFlags, "xAOD", itemList)) @@ -245,9 +258,9 @@ if not args.isMC: replicaSvc.UseGeomSQLite = True # Configure verbosity -# ConfigFlags.dump() if args.verbose: acc.foreach_component("*").OutputLevel = VERBOSE + ConfigFlags.dump() else: acc.foreach_component("*").OutputLevel = INFO diff --git a/Control/CalypsoExample/Reconstruction/scripts/filterRun.sh b/Control/CalypsoExample/Reconstruction/scripts/filterRun.sh new file mode 100755 index 0000000000000000000000000000000000000000..9901f9e9fcd1e3168b9833ddefab28514e2387fa --- /dev/null +++ b/Control/CalypsoExample/Reconstruction/scripts/filterRun.sh @@ -0,0 +1,210 @@ +#!/usr/bin/bash +# +# Script to filter events likely to have tracks and reconstruct them +# Need to build calypso AND faser-common for this to work +# Execute from the run folder in a release directory +# +# Usage: +# ./filterRun.sh [--seg] [--install] <runnumber> +# +# Options: +# --seg - filter individual segments +# by default, run is filtered into single file +# --install - copy output to EOS +# --cleanup - delete local files after EOS copy +# +# During the first 13.6 collision event, this was run with: +# watch -n 150 ./filterRun.sh --install --seg 7733 +# +# The --cleanup flag doesn't work for this, as the script checks the +# local directory (not EOS) to see if a segment has already been processed. +# +# To just make a single filtered file for a run and cleanup afterwards: +# ./filterRun.sh --install --cleanup 7733 +# +# Hacky parameters +# These are used to label the output files, correct for when this was used, +# but may not be correct in the future +rtag='r0008' +filter='TrigMask08' +# +# Usage function +print_usage () { + echo "Usage: filterRun [--seg] [--install] [--cleanup] <runnum>" +} + +# EOS copy function +copy_to_eos () { + local filtdir=/eos/experiment/faser/filter/2022/$filter + local rawdir=$filtdir/raw/$run_number + local recdir=$filtdir/$rtag/$run_number + local logdir=$filtdir/${rtag}_log/$run_number + + # Make sure directories exist + mkdir -p $rawdir + mkdir -p $recdir + mkdir -p $logdir + + # Copy files + echo "Copy $outfile to $rawdir" + xrdcp --silent --force $outfile root://eospublic.cern.ch/${rawdir} + echo "Copy $recfile to $recdir" + xrdcp --silent --force $recfile root://eospublic.cern.ch/${recdir} + echo "Copy $logfile to $logdir" + xrdcp --silent --force $logfile root://eospublic.cern.ch/${logdir} +} + +# Parse commaand-line options +while [ -n "$1" ] +do + case "$1" in + -s | --seg) + echo "Filtering individual segments" + seg=1 + shift;; # This eats the option + + -i | --install) + echo "Copying files to EOS when done" + install=1 + shift;; + + --cleanup) + cleanup=1 + shift;; + + -*) + echo "Unknown option $1" + shift;; + + *) break;; # Not an option, don't shift + esac +done +# +if [ -z $1 ] +then + echo "No run number specified!" + print_usage + exit 1 +fi +# +printf -v run "%06d" ${1} +echo Filtering run ${1} +# +eosdir=/eos/experiment/faser/raw/2022/$run +appstr='' +for file in $eosdir/* +do + if ! [ -f $file ]; then + continue + fi + + if ! [[ $file == $eosdir/Faser-Physics-* ]]; then + continue + fi + + filename=`basename $file` + + # Split filename + OIFS=$IFS + IFS='.' 
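+ # e.g. "Faser-Physics-007833-00003.raw" -> file_stem="Faser-Physics-007833-00003", ext="raw"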
+ read file_stem ext <<< "$filename"
+
+ IFS='-'
+ read faser type run_number segment <<< "$file_stem"
+
+ # Put the delimiter back
+ IFS=$OIFS
+
+ # Do this one file at a time?
+ if [ -z $seg ]
+ then
+ # Append filtered output to one file
+ outfile=Faser-$type-$run_number-$filter.$ext
+ logfile=Faser-$type-$run_number-$filter.log
+
+ # Make sure logfile exists for first iteration
+ touch $logfile
+ echo Filtering $filename to $outfile
+ echo Filtering $filename to $outfile >> $logfile
+
+ # Run the event filter, appending to the combined output after the first file
+ ../calypso/faser-common/build/EventFormats/eventFilter $appstr -t 0x08 $file $outfile >> $logfile 2>&1
+
+ # Append from here on out
+ appstr='-a'
+
+ else
+ # Separate files for each segment
+ outfile=Faser-$type-$run_number-$segment-$filter.$ext
+ logfile=Faser-$type-$run_number-$segment-$filter.log
+
+ # Don't overwrite
+ if [ -f "$outfile" ]; then
+ echo $outfile already done - skipping
+ continue
+ fi
+
+ echo Filtering $filename to $outfile
+ echo Filtering $filename to $outfile > $logfile
+
+ # Filter
+ ../calypso/faser-common/build/EventFormats/eventFilter -t 0x08 $file $outfile >> $logfile 2>&1
+
+ echo Filtered `grep "#fragments=" $logfile | wc -l` events
+ echo Filtered `grep "#fragments=" $logfile | wc -l` events >> $logfile
+
+ # And reconstruct it if not empty
+ if ! [ -s $outfile ]; then
+ echo "No events filtered" >> $logfile
+ continue
+ fi
+
+ # Reconstruct
+ echo "Reco $outfile"
+ faser_reco.py -r $rtag --geom TI12Data03 $outfile >> $logfile 2>&1
+
+ # Do we want to save this to EOS?
+ if [ -z $install ]; then
+ continue
+ fi
+
+ # Copy files
+ recfile=Faser-Physics-$run_number-$segment-$filter-$rtag-xAOD.root
+ copy_to_eos
+
+ fi
+
+done
+
+# Did we write one big file?
+if [ -z $seg ]
+then
+
+ echo Filtered `grep "#fragments=" $logfile | wc -l` events
+ echo Filtered `grep "#fragments=" $logfile | wc -l` events >> $logfile
+
+ # Run reco if file isn't empty
+ if [ -s $outfile ]; then
+ echo "Reco $outfile"
+ faser_reco.py -r $rtag --geom TI12Data03 $outfile >> $logfile 2>&1
+
+ # Do we want to save this to EOS?
+ if [ -z $install ]; then
+ exit 0
+ fi
+
+ recfile=Faser-Physics-$run_number-$filter-$rtag-xAOD.root
+ copy_to_eos
+
+ else
+ echo "No events filtered" >> $logfile
+ fi
+fi
+
+if ! [ -z $cleanup ]; then
+ echo "Removing files"
+ rm Faser-Physics-$run_number-*
+ rm PoolFileCatalog.xml
+ rm SegmentFitHistograms.root
+ rm eventLoopHeartBeat.txt
+fi
diff --git a/Control/CalypsoExample/Reconstruction/scripts/submit_faserMDC_reco.sh b/Control/CalypsoExample/Reconstruction/scripts/submit_faserMDC_reco.sh
index b04d56aa9d13f3bac39f02213e0f47625714c689..5dacef1cf4152e0a1cd817d7e52e9c38c65a1fb9 100755
--- a/Control/CalypsoExample/Reconstruction/scripts/submit_faserMDC_reco.sh
+++ b/Control/CalypsoExample/Reconstruction/scripts/submit_faserMDC_reco.sh
@@ -4,6 +4,10 @@
# Usage:
# submit_faser_reco.sh file_path [release_directory] [working_directory] [nevents]
#
+# Options:
+# --out - specify output location (in EOS) to copy output xAOD file
+# --log - specify output location (in EOS) for log file
+#
# file_path - full file name (with path)
# release_directory - optional path to release install directory (default pwd)
# working_directory - optional path to output directory location (default pwd)
@@ -16,6 +20,34 @@
# If this matches reco/r????
it will be passed to the reco job # #---------------------------------------- +# Keep track of time +SECONDS=0 +# +# Parse command-line options +while [ -n "$1" ] +do + case "$1" in + -l | --log) + logdest="$2"; + shift; + shift;; # Must eat 2 options here + + -o | --out) + outdest="$2"; + shift; + shift;; + + --) # End of options + shift; # Eat this + break;; # And stop parsing + + -*) + echo "Unknown option $1" + shift;; + + *) break;; # Not an option, don't shift + esac +done # # Parse command-line options file_path=${1} @@ -70,7 +102,8 @@ output_directory="$working_directory/${run_number}" mkdir -p "$output_directory" # # This magic redirects everything in this script to our log file -exec >& "$output_directory/$file_stem.log" +logfile="${file_stem}.rec.log" +exec >& "$output_directory/${logfile}" echo `date` - $HOSTNAME echo "File: $file_name" echo "Release: $release_directory" @@ -132,3 +165,33 @@ fi # # Print out ending time date +echo "Job finished after $SECONDS seconds" +# +# Copy output to EOS if desired +export EOS_MGM_URL=root://eospublic.cern.ch +# +if ! [ -z "$outdest" ] +then + ls -l + echo "copy *-RDO.root to $outdest" + mkdir -p $outdest + # Keep this line from stopping script, so we might get a log file + eos cp *-xAOD.root ${outdest}/ || true +fi +# +# Also copy log file +if ! [ -z "$logdest" ] +then + cd .. + ls -l + echo "copy $logfile to $logdest" + mkdir -p $logdest + eos cp $logfile $logdest/$logfile +elif ! [ -z "$outdest" ] +then + cd .. + ls -l + echo "copy $logfile to $outdest" + mkdir -p $outdest + eos cp $logfile $outdest/$logfile +fi diff --git a/Control/CalypsoExample/Reconstruction/scripts/submit_faser_reco.sh b/Control/CalypsoExample/Reconstruction/scripts/submit_faser_reco.sh index 277d6eaa79a3bc510cb413aa742b292837e28403..6924506133cddacd3ad7f4f88aec64121719b9b8 100755 --- a/Control/CalypsoExample/Reconstruction/scripts/submit_faser_reco.sh +++ b/Control/CalypsoExample/Reconstruction/scripts/submit_faser_reco.sh @@ -4,6 +4,12 @@ # Usage: # submit_faser_reco.sh file_path [release_directory] [working_directory] [nevents] # +# Options: +# --out - specify output location (in EOS) to copy output HITS file +# --log - specify output location (in EOS) for log file +# --geom - specify geometry +# --isMC - needed for MC reco +# # file_path - full file name (with path) # release_directory - optional path to release install directory (default pwd) # working_directory - optional path to output directory location (default pwd) @@ -16,8 +22,45 @@ # If this matches reco/r???? 
it will be passed to the reco job # #---------------------------------------- +# Keep track of time +SECONDS=0 # # Parse command-line options +while [ -n "$1" ] +do + case "$1" in + -l | --log) + logdest="$2"; + shift; + shift;; # Must eat 2 options here + + -o | --out) + outdest="$2"; + shift; + shift;; + + -g | --geom) + geom="$2"; + shift; + shift;; + + --isMC) + ismc=1 + shift;; + + --) # End of options + shift; # Eat this + break;; # And stop parsing + + -*) + echo "Unknown option $1" + shift;; + + *) break;; # Not an option, don't shift + esac +done +# +# Parse command-line arguments file_path=${1} release_directory=${2} working_directory=${3} @@ -66,13 +109,16 @@ read faser type run_number segment <<< "$file_stem" IFS=$defaultIFS # # Make output directory if needed -output_directory="$working_directory/Run-$run_number" +output_directory="$working_directory/${run_number}" mkdir -p "$output_directory" # # This magic redirects everything in this script to our log file -exec >& "$output_directory/$file_stem.log" +logfile="${file_stem}.rec.log" +exec >& "$output_directory/$logfile" echo `date` - $HOSTNAME echo "File: $file_name" +echo "Filepath: $file_path" +echo "Geom: $geom" echo "Release: $release_directory" echo "Output: $output_directory" echo "Starting: $starting_directory" @@ -85,13 +131,13 @@ source ${ATLAS_LOCAL_ROOT_BASE}/user/atlasLocalSetup.sh # Always go back to the starting directory in case paths are relative cd "$starting_directory" cd "$release_directory" -asetup -source build/x8*/setup.sh +#asetup +#source build/x8*/setup.sh # # Do this by hand -# asetup --input="$release_directory/calypso/asetup.faser" Athena,22.0.49 -# source "$release_directory/build/x8*/setup.sh" -# +asetup --input=calypso/asetup.faser Athena,22.0.49 +source run/setup.sh +#source build/x86*/setup.sh # # Try to find a release tag cd calypso @@ -102,7 +148,7 @@ if [[ "$recotag" == "reco/r"???? ]]; then fi if [[ "$recotag" == "reco/p"???? ]]; then tag=`echo "$recotag" | cut -c 6-11` - echo "Found reco tag: $tag" + echo "Found proc tag: $tag" fi # # Move to the run directory @@ -124,11 +170,60 @@ fi cd "$file_stem" # # Run job -if [[ -z "$rtag" ]]; then - faser_reco.py "--nevents=$nevents" "$file_path" +if [[ -z "$tag" ]]; then + tagstr="" +else + tagstr="--reco=$tag" +fi +# +if [[ -z "$geom" ]]; then + geomstr="" +else + geomstr="--geom $geom" +fi +# +if [[ -z "$ismc" ]]; then + mcstr="" else - faser_reco.py "--nevents=$nevents" "--reco=$tag" "$file_path" + mcstr="--isMC" fi # +faser_reco.py "--nevents=$nevents" $geomstr $tagstr $mcstr "$file_path" +# # Print out ending time date +echo "Job finished after $SECONDS seconds" +# +# Copy output to EOS if desired +export EOS_MGM_URL=root://eospublic.cern.ch +# +# Now copy output file +if ! [ -z "$outdest" ] +then + echo "Output directory:" + ls -l + echo "copy *-xAOD.root to $outdest" + eos mkdir -p $outdest + # Keep this line from stopping script, so we might get a log file + # || true ensures script continues even if copy fails + eos cp *-xAOD.root ${outdest}/ || true +fi +# +# Copy log file second +if ! [ -z "$logdest" ] +then + cd .. + echo "Working directory:" + ls -l + echo "copy $logfile to $logdest" + eos mkdir -p $logdest + eos cp $logfile $logdest/$logfile +elif ! [ -z "$outdest" ] +then + cd .. 
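+ # No --log destination was given, so stage the log file alongside the xAOD output in $outdest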
+ echo "Working directory:" + ls -l + echo "copy $logfile to $outdest" + eos mkdir -p $outdest + eos cp $logfile $outdest/$logfile +fi diff --git a/Control/CalypsoExample/Simulation/scripts/faserMDC_simulate.py b/Control/CalypsoExample/Simulation/scripts/faserMDC_simulate.py index 12de492f4070237b59dda273673b994bbc275d37..0fd9622db5de8630ffaef34bd2ae4b6eabe88a5c 100755 --- a/Control/CalypsoExample/Simulation/scripts/faserMDC_simulate.py +++ b/Control/CalypsoExample/Simulation/scripts/faserMDC_simulate.py @@ -160,7 +160,7 @@ if __name__ == '__main__': # # MDC geometry configuration # - detectors = ['Veto', 'VetoNu', 'Preshower', 'FaserSCT', 'Ecal', 'Trigger', 'Dipole', 'Emulsion'] + detectors = ['Veto', 'VetoNu', 'Preshower', 'FaserSCT', 'Ecal', 'Trigger', 'Dipole', 'Emulsion', 'Trench'] # # Setup detector flags # diff --git a/Control/CalypsoExample/Simulation/scripts/submit_faserMDC_simulate.sh b/Control/CalypsoExample/Simulation/scripts/submit_faserMDC_simulate.sh index 3c175d72519eda6d9f973c84ea3c2b1af00b7dba..03b8a836e5c07c336165f5e30a577aafc9aab76b 100755 --- a/Control/CalypsoExample/Simulation/scripts/submit_faserMDC_simulate.sh +++ b/Control/CalypsoExample/Simulation/scripts/submit_faserMDC_simulate.sh @@ -6,6 +6,8 @@ # # Options: # --shift - apply crossing angle (and FASER shift) +# --out - specify output location (in EOS) to copy output HITS file +# --log - specify output location (in EOS) for log file # # input_file - full file name (with path) # output_file - full output file name @@ -28,16 +30,30 @@ SECONDS=0 while [ -n "$1" ] do case "$1" in - -s | --shift) + -s | --shift) echo "Applying crossing-angle shift" xangle=1 shift;; # This 'eats' the argument - -*) + -l | --log) + logdest="$2"; + shift; + shift;; # Must eat 2 options here + + -o | --out) + outdest="$2"; + shift; + shift;; + + --) # End of options + shift; # Eat this + break;; # And stop parsing + + -*) echo "Unknown option $1" shift;; - *) break;; # Not an option, don't shift + *) break;; # Not an option, don't shift esac done @@ -117,7 +133,8 @@ fi mkdir -p "$output_directory" # # This magic redirects everything in this script to our log file -exec >& "$output_directory/${file_stem}.log" +logfile=${file_stem}.sim.log +exec >& "$output_directory/$logfile" echo `date` - $HOSTNAME echo "Input File: $infile" echo "Output File: $outfilename" @@ -174,4 +191,31 @@ fi date echo "Job finished after $SECONDS seconds" # +# Copy output to EOS if desired +export EOS_MGM_URL=root://eospublic.cern.ch +# +if ! [ -z "$outdest" ] +then + ls -l + echo "copy *-HITS.root to $outdest" + mkdir -p $outdest + eos cp *-HITS.root ${outdest}/ || true +fi +# +# Also copy log file +if ! [ -z "$logdest" ] +then + cd .. + ls -l + echo "copy $logfile to $logdest" + mkdir -p $logdest + eos cp $logfile $logdest/$logfile +elif ! [ -z "$outdest" ] +then + cd .. 
+ ls -l + echo "copy $logfile to $outdest" + mkdir -p $outdest + eos cp $logfile $outdest/$logfile +fi diff --git a/Control/CalypsoExample/rawdata/Faser-Physics-007833-00003-TrigMask08.raw b/Control/CalypsoExample/rawdata/Faser-Physics-007833-00003-TrigMask08.raw new file mode 100644 index 0000000000000000000000000000000000000000..55004da5cc7d612760d61e2fe6a7d2d9edede69a Binary files /dev/null and b/Control/CalypsoExample/rawdata/Faser-Physics-007833-00003-TrigMask08.raw differ diff --git a/Database/ConnectionManagement/FaserAuthentication/data/dblookup.xml b/Database/ConnectionManagement/FaserAuthentication/data/dblookup.xml index 4aa3d6a1493e1637b49d2ac3c6b6831cfb2cbe79..5296f3e70d15c297c5489378326fe25f809ecb93 100644 --- a/Database/ConnectionManagement/FaserAuthentication/data/dblookup.xml +++ b/Database/ConnectionManagement/FaserAuthentication/data/dblookup.xml @@ -32,8 +32,8 @@ </logicalservice> <logicalservice name="COOLOFL_TRIGGER"> - <service name="sqlite_file:data/sqlite200/waveform_reco.db" accessMode="read" /> - <service name="sqlite_file:///cvmfs/faser.cern.ch/repo/sw/database/DBRelease/current/sqlite200/waveform_reco.db" accessMode="read" /> + <service name="sqlite_file:data/sqlite200/ALLP200.db" accessMode="read" /> + <service name="sqlite_file:///cvmfs/faser.cern.ch/repo/sw/database/DBRelease/current/sqlite200/ALLP200.db" accessMode="read" /> </logicalservice> </servicelist> diff --git a/DetectorDescription/GeoModel/FaserGeoModel/python/GeoModelInit.py b/DetectorDescription/GeoModel/FaserGeoModel/python/GeoModelInit.py index af1ed16dcb630fd08948f6a648d9deb6d46756e2..d17c8e39d932df7be5a7265d7b4c2403a0546430 100644 --- a/DetectorDescription/GeoModel/FaserGeoModel/python/GeoModelInit.py +++ b/DetectorDescription/GeoModel/FaserGeoModel/python/GeoModelInit.py @@ -95,7 +95,9 @@ def _setupGeoModel(): # Deal with SCT alignment conditions folders and algorithms - conddb.addFolderSplitOnline("SCT","/Tracker/Onl/Align","/Tracker/Align",className="AlignableTransformContainer") + #conddb.addFolderSplitOnline("SCT","/Tracker/Onl/Align","/Tracker/Align",className="AlignableTransformContainer") + print("Override Alignment dbname to OFLP200, fix this when alignment available in CONDBR3") + conddb.addFolder("/Tracker/Align", "SCT_OFL",className="AlignableTransformContainer",db="OFLP200") from AthenaCommon.AlgSequence import AthSequencer condSeq = AthSequencer("AthCondSeq") if not hasattr(condSeq, "FaserSCT_AlignCondAlg"): diff --git a/PhysicsAnalysis/NeutrinoSearch/CMakeLists.txt b/PhysicsAnalysis/NeutrinoSearch/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..cff2ab8575553261c18eb0736c211528e44f22bb --- /dev/null +++ b/PhysicsAnalysis/NeutrinoSearch/CMakeLists.txt @@ -0,0 +1,12 @@ +atlas_subdir(NeutrinoSearch) + +atlas_add_component( + NeutrinoSearch + src/NeutrinoSearchAlg.h + src/NeutrinoSearchAlg.cxx + src/component/NeutrinoSearch_entries.cxx + LINK_LIBRARIES AthenaBaseComps StoreGateLib xAODFaserWaveform ScintIdentifier FaserCaloIdentifier GeneratorObjects FaserActsGeometryLib TrackerSimEvent TrackerSimData TrackerIdentifier TrackerReadoutGeometry TrkTrack GeoPrimitives TrackerRIO_OnTrack +) + +atlas_install_python_modules(python/*.py) +# atlas_install_scripts(test/*.py) diff --git a/PhysicsAnalysis/NeutrinoSearch/python/FilterSearchConfig.py b/PhysicsAnalysis/NeutrinoSearch/python/FilterSearchConfig.py new file mode 100644 index 0000000000000000000000000000000000000000..9b76c14b8b6682b6cfe77ed3331467d4f9737cfe --- /dev/null +++ 
b/PhysicsAnalysis/NeutrinoSearch/python/FilterSearchConfig.py @@ -0,0 +1,530 @@ +""" + Copyright (C) 2002-2022 CERN for the benefit of the ATLAS collaboration +""" + +from AthenaConfiguration.ComponentAccumulator import ComponentAccumulator +from AthenaConfiguration.ComponentFactory import CompFactory +from MagFieldServices.MagFieldServicesConfig import MagneticFieldSvcCfg + +def NeutrinoSearchAlgCfg(flags, **kwargs): + # Initialize GeoModel + from FaserGeoModel.FaserGeoModelConfig import FaserGeometryCfg + acc = FaserGeometryCfg(flags) + + acc.merge(MagneticFieldSvcCfg(flags)) + # acc.merge(FaserActsTrackingGeometrySvcCfg(flags)) + # acc.merge(FaserActsAlignmentCondAlgCfg(flags)) + + actsExtrapolationTool = CompFactory.FaserActsExtrapolationTool("FaserActsExtrapolationTool") + actsExtrapolationTool.MaxSteps = 1000 + actsExtrapolationTool.TrackingGeometryTool = CompFactory.FaserActsTrackingGeometryTool("TrackingGeometryTool") + + NeutrinoSearchAlg = CompFactory.NeutrinoSearchAlg("NeutrinoSearchAlg",**kwargs) + NeutrinoSearchAlg.ExtrapolationTool = actsExtrapolationTool + acc.addEventAlgo(NeutrinoSearchAlg) + + thistSvc = CompFactory.THistSvc() + thistSvc.Output += ["HIST2 DATAFILE='FilterSearch.root' OPT='RECREATE'"] + acc.addService(thistSvc) + + return acc + +if __name__ == "__main__": + + import sys + from AthenaCommon.Logging import log, logging + from AthenaCommon.Constants import DEBUG, VERBOSE, INFO + from AthenaCommon.Configurable import Configurable + from CalypsoConfiguration.AllConfigFlags import ConfigFlags + from AthenaConfiguration.TestDefaults import defaultTestFiles + from CalypsoConfiguration.MainServicesConfig import MainServicesCfg + from AthenaPoolCnvSvc.PoolReadConfig import PoolReadCfg + # from OutputStreamAthenaPool.OutputStreamConfig import OutputStreamCfg + + # Set up logging and new style config + log.setLevel(DEBUG) + Configurable.configurableRun3Behavior = True + + # Configure + ConfigFlags.Input.Files = [ + '/run/media/dcasper/Data/faser/data/Faser-Physics-007613-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007705-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007720-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007725-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007728-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007729-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007730-00003-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007733-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007734-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007802-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00003-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00004-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00005-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00006-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00007-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00008-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00009-TrigMask08-r0008-xAOD.root', + 
'/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00010-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00011-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00012-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00013-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00014-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00015-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00016-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00017-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00018-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00019-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00020-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00021-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00022-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00023-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00024-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00025-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00026-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00027-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00028-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00029-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00030-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00031-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00032-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00033-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00034-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00035-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00036-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00037-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00038-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00039-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00040-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00041-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00042-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007833-00043-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00005-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00006-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00007-TrigMask08-r0008-xAOD.root', + 
'/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00012-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00013-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00014-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00015-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00016-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00017-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00018-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00019-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00020-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00021-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00022-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00023-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00024-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00025-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00026-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00027-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00028-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00029-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00030-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00031-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00032-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00033-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00034-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00035-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00036-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00037-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00038-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00039-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00040-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00041-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00042-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00043-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00044-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00045-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00046-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00047-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00048-TrigMask08-r0008-xAOD.root', + 
'/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00049-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00050-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00051-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00052-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00053-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00054-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00055-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007835-00061-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007836-00000-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007837-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007838-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007840-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007841-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007842-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007843-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007844-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007845-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007846-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007847-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007848-00000-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007848-00001-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007848-00002-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007848-00003-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007848-00004-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007848-00005-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007848-00006-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007848-00007-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007848-00008-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007848-00009-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007848-00010-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007848-00011-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007848-00012-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007848-00013-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007848-00014-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007848-00015-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007848-00016-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007848-00017-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00005-TrigMask08-r0008-xAOD.root', + 
'/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00006-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00007-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00008-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00009-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00010-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00011-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00012-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00013-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00014-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00015-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00016-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00017-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00018-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00019-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00020-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00021-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00022-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00023-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00024-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00025-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00026-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00027-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00028-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00029-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007849-00030-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00002-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00003-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00004-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00005-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00006-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00007-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00008-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00009-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00010-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00011-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00012-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00013-TrigMask08-r0008-xAOD.root', + 
'/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00014-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00015-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00016-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00017-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00018-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00019-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00020-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00021-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00022-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00023-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00024-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00025-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00026-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00027-TrigMask08-r0008-xAOD.root', + # '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00028-TrigMask08-r0008-xAOD.root', # not closed? + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00029-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00030-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00031-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00032-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00033-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00034-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00035-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00036-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00037-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00038-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00039-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00040-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00041-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00042-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00043-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00044-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00045-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00046-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00047-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00048-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00049-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00050-TrigMask08-r0008-xAOD.root', + 
'/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00051-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00052-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00053-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00054-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007918-00055-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00001-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00002-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00003-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00004-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00005-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00006-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00007-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00008-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00009-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00010-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00011-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00012-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00013-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00014-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00015-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00016-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00017-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00018-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00019-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00020-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00021-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00022-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00023-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00024-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00025-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00027-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00028-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00030-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00031-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00033-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00035-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00037-TrigMask08-r0008-xAOD.root', + 
'/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00038-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00043-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00044-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00045-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00046-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00047-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00048-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00049-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00050-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00051-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00052-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00053-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00054-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00055-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00056-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00057-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00058-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00059-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00060-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00061-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00062-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00063-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00064-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00065-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00066-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00067-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00068-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00069-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00070-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00071-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00072-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00073-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00074-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00075-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00076-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00077-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00078-TrigMask08-r0008-xAOD.root', + 
'/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00079-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00080-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00081-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00082-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00083-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00084-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00085-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00086-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00087-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00088-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00089-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00090-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00091-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00092-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00093-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00094-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00095-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00096-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00097-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00098-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00099-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00100-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00101-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00102-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00103-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00104-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00105-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00106-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00107-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00108-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00109-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007930-00110-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00003-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00005-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00006-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00007-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00008-TrigMask08-r0008-xAOD.root', + 
'/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00009-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00010-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00011-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00012-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00013-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00014-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00015-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00016-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00017-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00018-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00019-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00020-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00021-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00022-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00023-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00024-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00025-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00026-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00027-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00028-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00029-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00030-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00031-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00032-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00033-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00034-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00035-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00036-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00037-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00038-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00039-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00040-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00041-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00042-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00043-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00044-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00045-TrigMask08-r0008-xAOD.root', + 
'/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00046-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00047-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007961-00048-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007972-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007973-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007974-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007975-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007976-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007977-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007978-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007979-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007980-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007981-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007982-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007983-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007984-00000-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007984-00001-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007984-00002-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007984-00003-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007984-00004-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007984-00005-TrigMask08-r0008-xAOD.root', + # '/run/media/dcasper/Data/faser/data/Faser-Physics-007984-00006-TrigMask08-r0008-xAOD.root', # not closed? 
+ '/run/media/dcasper/Data/faser/data/Faser-Physics-007984-00007-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007984-00008-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007984-00009-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007984-00010-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007984-00011-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007984-00012-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007984-00013-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00003-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00004-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00005-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00006-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00007-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00008-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00009-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00010-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00011-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00012-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00013-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00015-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00016-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00017-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00018-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00019-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00021-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00022-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00023-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00024-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00025-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00026-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00027-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00028-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00029-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00030-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00031-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00032-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00033-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00034-TrigMask08-r0008-xAOD.root', + 
'/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00035-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00036-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00037-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00039-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00040-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00041-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00042-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00043-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00044-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00045-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00046-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00047-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00048-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00049-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00050-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00051-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00052-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00053-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00054-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00055-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00056-TrigMask08-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/data/Faser-Physics-007985-00057-TrigMask08-r0008-xAOD.root', + + + ] + ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-02" # Always needed; must match FaserVersionS + ConfigFlags.IOVDb.DatabaseInstance = "OFLP200" # Use MC conditions for now + ConfigFlags.Input.ProjectName = "data21" # Needed to bypass autoconfig + ConfigFlags.Input.isMC = False # Needed to bypass autoconfig + ConfigFlags.GeoModel.FaserVersion = "FASERNU-03" # FASER geometry + ConfigFlags.Common.isOnline = False + ConfigFlags.GeoModel.Align.Dynamic = False + ConfigFlags.Beam.NumberOfCollisions = 0. 
+ + ConfigFlags.Detector.GeometryFaserSCT = True + + ConfigFlags.lock() + + # Core components + acc = MainServicesCfg(ConfigFlags) + acc.merge(PoolReadCfg(ConfigFlags)) + + # algorithm + acc.merge(NeutrinoSearchAlgCfg(ConfigFlags, UseFlukaWeights=True)) + + # # Hack to avoid problem with our use of MC databases when isMC = False + replicaSvc = acc.getService("DBReplicaSvc") + replicaSvc.COOLSQLiteVetoPattern = "" + replicaSvc.UseCOOLSQLite = True + replicaSvc.UseCOOLFrontier = False + replicaSvc.UseGeomSQLite = True + + # Timing + #acc.merge(MergeRecoTimingObjCfg(ConfigFlags)) + + # Dump config + # logging.getLogger('forcomps').setLevel(VERBOSE) + # acc.foreach_component("*").OutputLevel = VERBOSE + # acc.foreach_component("*ClassID*").OutputLevel = INFO + # acc.getCondAlgo("FaserSCT_AlignCondAlg").OutputLevel = VERBOSE + # acc.getCondAlgo("FaserSCT_DetectorElementCondAlg").OutputLevel = VERBOSE + # acc.getService("StoreGateSvc").Dump = True + # acc.getService("ConditionStore").Dump = True + # acc.printConfig(withDetails=True) + # ConfigFlags.dump() + + # Execute and finish + sc = acc.run(maxEvents=-1) + + # Success should be 0 + sys.exit(not sc.isSuccess()) diff --git a/PhysicsAnalysis/NeutrinoSearch/python/FlukaSearchConfig.py b/PhysicsAnalysis/NeutrinoSearch/python/FlukaSearchConfig.py new file mode 100644 index 0000000000000000000000000000000000000000..4e196a3794670d241bf8a26b91d409d260ac193d --- /dev/null +++ b/PhysicsAnalysis/NeutrinoSearch/python/FlukaSearchConfig.py @@ -0,0 +1,140 @@ +""" + Copyright (C) 2002-2022 CERN for the benefit of the ATLAS collaboration +""" + +from AthenaConfiguration.ComponentAccumulator import ComponentAccumulator +from AthenaConfiguration.ComponentFactory import CompFactory +from MagFieldServices.MagFieldServicesConfig import MagneticFieldSvcCfg + +def NeutrinoSearchAlgCfg(flags, **kwargs): + # Initialize GeoModel + from FaserGeoModel.FaserGeoModelConfig import FaserGeometryCfg + acc = FaserGeometryCfg(flags) + + acc.merge(MagneticFieldSvcCfg(flags)) + # acc.merge(FaserActsTrackingGeometrySvcCfg(flags)) + # acc.merge(FaserActsAlignmentCondAlgCfg(flags)) + + actsExtrapolationTool = CompFactory.FaserActsExtrapolationTool("FaserActsExtrapolationTool") + actsExtrapolationTool.MaxSteps = 1000 + actsExtrapolationTool.TrackingGeometryTool = CompFactory.FaserActsTrackingGeometryTool("TrackingGeometryTool") + + NeutrinoSearchAlg = CompFactory.NeutrinoSearchAlg("NeutrinoSearchAlg",**kwargs) + NeutrinoSearchAlg.ExtrapolationTool = actsExtrapolationTool + acc.addEventAlgo(NeutrinoSearchAlg) + + thistSvc = CompFactory.THistSvc() + thistSvc.Output += ["HIST2 DATAFILE='FlukaSearch.root' OPT='RECREATE'"] + acc.addService(thistSvc) + + return acc + +if __name__ == "__main__": + + import sys + from AthenaCommon.Logging import log, logging + from AthenaCommon.Constants import DEBUG, VERBOSE, INFO + from AthenaCommon.Configurable import Configurable + from CalypsoConfiguration.AllConfigFlags import ConfigFlags + from AthenaConfiguration.TestDefaults import defaultTestFiles + from CalypsoConfiguration.MainServicesConfig import MainServicesCfg + from AthenaPoolCnvSvc.PoolReadConfig import PoolReadCfg + # from OutputStreamAthenaPool.OutputStreamConfig import OutputStreamCfg + + # Set up logging and new style config + log.setLevel(DEBUG) + Configurable.configurableRun3Behavior = True + + # Configure + ConfigFlags.Input.Files = [ + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00000-s0005-r0008-xAOD.root', + 
'/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00001-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00002-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00003-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00004-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00005-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00006-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00007-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00008-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00009-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00010-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00011-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00012-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00013-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00014-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00015-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00016-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00017-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00018-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00019-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00020-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00021-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00022-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00023-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00024-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00025-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00026-s0005-r0008-xAOD.root', + 
'/run/media/dcasper/Data/faser/fluka/210001/rec/r0008/FaserMC-MDC_Fluka_unit30_Nm_71m_m3750_v3-210001-00027-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210002/rec/r0008/FaserMC-MDC_Fluka_unit30_Pm_71m_m3750_v3-210002-00000-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210002/rec/r0008/FaserMC-MDC_Fluka_unit30_Pm_71m_m3750_v3-210002-00001-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210002/rec/r0008/FaserMC-MDC_Fluka_unit30_Pm_71m_m3750_v3-210002-00002-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210002/rec/r0008/FaserMC-MDC_Fluka_unit30_Pm_71m_m3750_v3-210002-00003-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210002/rec/r0008/FaserMC-MDC_Fluka_unit30_Pm_71m_m3750_v3-210002-00004-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210002/rec/r0008/FaserMC-MDC_Fluka_unit30_Pm_71m_m3750_v3-210002-00005-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210002/rec/r0008/FaserMC-MDC_Fluka_unit30_Pm_71m_m3750_v3-210002-00006-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210002/rec/r0008/FaserMC-MDC_Fluka_unit30_Pm_71m_m3750_v3-210002-00007-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210002/rec/r0008/FaserMC-MDC_Fluka_unit30_Pm_71m_m3750_v3-210002-00008-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210002/rec/r0008/FaserMC-MDC_Fluka_unit30_Pm_71m_m3750_v3-210002-00009-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210002/rec/r0008/FaserMC-MDC_Fluka_unit30_Pm_71m_m3750_v3-210002-00010-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210002/rec/r0008/FaserMC-MDC_Fluka_unit30_Pm_71m_m3750_v3-210002-00011-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210002/rec/r0008/FaserMC-MDC_Fluka_unit30_Pm_71m_m3750_v3-210002-00012-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210002/rec/r0008/FaserMC-MDC_Fluka_unit30_Pm_71m_m3750_v3-210002-00013-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210002/rec/r0008/FaserMC-MDC_Fluka_unit30_Pm_71m_m3750_v3-210002-00014-s0005-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/fluka/210002/rec/r0008/FaserMC-MDC_Fluka_unit30_Pm_71m_m3750_v3-210002-00015-s0005-r0008-xAOD.root' + ] + ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-02" # Always needed; must match FaserVersionS + ConfigFlags.IOVDb.DatabaseInstance = "OFLP200" # Use MC conditions for now + ConfigFlags.Input.ProjectName = "data21" # Needed to bypass autoconfig + ConfigFlags.Input.isMC = True # Needed to bypass autoconfig + ConfigFlags.GeoModel.FaserVersion = "FASERNU-03" # FASER geometry + ConfigFlags.Common.isOnline = False + ConfigFlags.GeoModel.Align.Dynamic = False + ConfigFlags.Beam.NumberOfCollisions = 0. 
+ + ConfigFlags.Detector.GeometryFaserSCT = True + + ConfigFlags.lock() + + # Core components + acc = MainServicesCfg(ConfigFlags) + acc.merge(PoolReadCfg(ConfigFlags)) + + # algorithm + acc.merge(NeutrinoSearchAlgCfg(ConfigFlags, UseFlukaWeights=True)) + + # # Hack to avoid problem with our use of MC databases when isMC = False + # replicaSvc = acc.getService("DBReplicaSvc") + # replicaSvc.COOLSQLiteVetoPattern = "" + # replicaSvc.UseCOOLSQLite = True + # replicaSvc.UseCOOLFrontier = False + # replicaSvc.UseGeomSQLite = True + + # Timing + #acc.merge(MergeRecoTimingObjCfg(ConfigFlags)) + + # Dump config + # logging.getLogger('forcomps').setLevel(VERBOSE) + # acc.foreach_component("*").OutputLevel = VERBOSE + # acc.foreach_component("*ClassID*").OutputLevel = INFO + # acc.getCondAlgo("FaserSCT_AlignCondAlg").OutputLevel = VERBOSE + # acc.getCondAlgo("FaserSCT_DetectorElementCondAlg").OutputLevel = VERBOSE + # acc.getService("StoreGateSvc").Dump = True + # acc.getService("ConditionStore").Dump = True + # acc.printConfig(withDetails=True) + # ConfigFlags.dump() + + # Execute and finish + sc = acc.run(maxEvents=-1) + + # Success should be 0 + sys.exit(not sc.isSuccess()) diff --git a/PhysicsAnalysis/NeutrinoSearch/python/GenieSearchConfig.py b/PhysicsAnalysis/NeutrinoSearch/python/GenieSearchConfig.py new file mode 100644 index 0000000000000000000000000000000000000000..a80de32bbc0643dbaca1ac12e28688535951b709 --- /dev/null +++ b/PhysicsAnalysis/NeutrinoSearch/python/GenieSearchConfig.py @@ -0,0 +1,104 @@ +""" + Copyright (C) 2002-2022 CERN for the benefit of the ATLAS collaboration +""" + +from AthenaConfiguration.ComponentAccumulator import ComponentAccumulator +from AthenaConfiguration.ComponentFactory import CompFactory +from MagFieldServices.MagFieldServicesConfig import MagneticFieldSvcCfg + +def NeutrinoSearchAlgCfg(flags, **kwargs): + # Initialize GeoModel + from FaserGeoModel.FaserGeoModelConfig import FaserGeometryCfg + acc = FaserGeometryCfg(flags) + + acc.merge(MagneticFieldSvcCfg(flags)) + # acc.merge(FaserActsTrackingGeometrySvcCfg(flags)) + # acc.merge(FaserActsAlignmentCondAlgCfg(flags)) + + actsExtrapolationTool = CompFactory.FaserActsExtrapolationTool("FaserActsExtrapolationTool") + actsExtrapolationTool.MaxSteps = 1000 + actsExtrapolationTool.TrackingGeometryTool = CompFactory.FaserActsTrackingGeometryTool("TrackingGeometryTool") + + NeutrinoSearchAlg = CompFactory.NeutrinoSearchAlg("NeutrinoSearchAlg",**kwargs) + NeutrinoSearchAlg.ExtrapolationTool = actsExtrapolationTool + acc.addEventAlgo(NeutrinoSearchAlg) + + thistSvc = CompFactory.THistSvc() + thistSvc.Output += ["HIST2 DATAFILE='GenieSearch.root' OPT='RECREATE'"] + acc.addService(thistSvc) + + return acc + +if __name__ == "__main__": + + import sys + from AthenaCommon.Logging import log, logging + from AthenaCommon.Constants import DEBUG, VERBOSE, INFO + from AthenaCommon.Configurable import Configurable + from CalypsoConfiguration.AllConfigFlags import ConfigFlags + from AthenaConfiguration.TestDefaults import defaultTestFiles + from CalypsoConfiguration.MainServicesConfig import MainServicesCfg + from AthenaPoolCnvSvc.PoolReadConfig import PoolReadCfg + # from OutputStreamAthenaPool.OutputStreamConfig import OutputStreamCfg + + # Set up logging and new style config + log.setLevel(DEBUG) + Configurable.configurableRun3Behavior = True + + # Configure + ConfigFlags.Input.Files = [ + '/run/media/dcasper/Data/faser/genie/rec/FaserMC-MDC_Genie_all_150invfb_v1-200001-00000-00007-s0006-r0008-xAOD.root', + 
'/run/media/dcasper/Data/faser/genie/rec/FaserMC-MDC_Genie_all_150invfb_v1-200001-00008-00015-s0006-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/genie/rec/FaserMC-MDC_Genie_all_150invfb_v1-200001-00016-00023-s0006-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/genie/rec/FaserMC-MDC_Genie_all_150invfb_v1-200001-00024-00031-s0006-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/genie/rec/FaserMC-MDC_Genie_all_150invfb_v1-200001-00032-00039-s0006-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/genie/rec/FaserMC-MDC_Genie_all_150invfb_v1-200001-00040-00047-s0006-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/genie/rec/FaserMC-MDC_Genie_all_150invfb_v1-200001-00048-00055-s0006-r0008-xAOD.root', + '/run/media/dcasper/Data/faser/genie/rec/FaserMC-MDC_Genie_all_150invfb_v1-200001-00056-00063-s0006-r0008-xAOD.root' + ] + ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-02" # Always needed; must match FaserVersionS + ConfigFlags.IOVDb.DatabaseInstance = "OFLP200" # Use MC conditions for now + ConfigFlags.Input.ProjectName = "data21" # Needed to bypass autoconfig + ConfigFlags.Input.isMC = True # Needed to bypass autoconfig + ConfigFlags.GeoModel.FaserVersion = "FASERNU-03" # FASER geometry + ConfigFlags.Common.isOnline = False + ConfigFlags.GeoModel.Align.Dynamic = False + ConfigFlags.Beam.NumberOfCollisions = 0. + + ConfigFlags.Detector.GeometryFaserSCT = True + + ConfigFlags.lock() + + # Core components + acc = MainServicesCfg(ConfigFlags) + acc.merge(PoolReadCfg(ConfigFlags)) + + # algorithm + acc.merge(NeutrinoSearchAlgCfg(ConfigFlags, UseGenieWeights=True)) + + # # Hack to avoid problem with our use of MC databases when isMC = False + # replicaSvc = acc.getService("DBReplicaSvc") + # replicaSvc.COOLSQLiteVetoPattern = "" + # replicaSvc.UseCOOLSQLite = True + # replicaSvc.UseCOOLFrontier = False + # replicaSvc.UseGeomSQLite = True + + # Timing + #acc.merge(MergeRecoTimingObjCfg(ConfigFlags)) + + # Dump config + # logging.getLogger('forcomps').setLevel(VERBOSE) + # acc.foreach_component("*").OutputLevel = VERBOSE + # acc.foreach_component("*ClassID*").OutputLevel = INFO + # acc.getCondAlgo("FaserSCT_AlignCondAlg").OutputLevel = VERBOSE + # acc.getCondAlgo("FaserSCT_DetectorElementCondAlg").OutputLevel = VERBOSE + # acc.getService("StoreGateSvc").Dump = True + # acc.getService("ConditionStore").Dump = True + # acc.printConfig(withDetails=True) + # ConfigFlags.dump() + + # Execute and finish + sc = acc.run(maxEvents=-1) + + # Success should be 0 + sys.exit(not sc.isSuccess()) diff --git a/PhysicsAnalysis/NeutrinoSearch/src/NeutrinoSearchAlg.cxx b/PhysicsAnalysis/NeutrinoSearch/src/NeutrinoSearchAlg.cxx new file mode 100644 index 0000000000000000000000000000000000000000..e782dda9e3eb7d97da42edb1b5a79528268dac9f --- /dev/null +++ b/PhysicsAnalysis/NeutrinoSearch/src/NeutrinoSearchAlg.cxx @@ -0,0 +1,637 @@ +#include "NeutrinoSearchAlg.h" +#include "TrkTrack/Track.h" +#include "TrackerRIO_OnTrack/FaserSCT_ClusterOnTrack.h" +#include "TrackerIdentifier/FaserSCT_ID.h" +#include "ScintIdentifier/VetoNuID.h" +#include "ScintIdentifier/VetoID.h" +#include "ScintIdentifier/TriggerID.h" +#include "ScintIdentifier/PreshowerID.h" +#include "FaserCaloIdentifier/EcalID.h" +#include "TrackerPrepRawData/FaserSCT_Cluster.h" +#include "Identifier/Identifier.h" +#include "TrackerReadoutGeometry/SCT_DetectorManager.h" +#include "TrackerReadoutGeometry/SiDetectorElement.h" +#include "TrackerPrepRawData/FaserSCT_Cluster.h" +#include "xAODTruth/TruthParticle.h" +#include <cmath> + + + 
+NeutrinoSearchAlg::NeutrinoSearchAlg(const std::string &name, + ISvcLocator *pSvcLocator) + : AthReentrantAlgorithm(name, pSvcLocator), + AthHistogramming(name), + m_histSvc("THistSvc/THistSvc", name) {} + + +StatusCode NeutrinoSearchAlg::initialize() +{ + ATH_CHECK(m_truthEventContainer.initialize()); + ATH_CHECK(m_truthParticleContainer.initialize()); + ATH_CHECK(m_trackCollection.initialize()); + ATH_CHECK(m_vetoNuContainer.initialize()); + ATH_CHECK(m_vetoContainer.initialize()); + ATH_CHECK(m_triggerContainer.initialize()); + ATH_CHECK(m_preshowerContainer.initialize()); + ATH_CHECK(m_ecalContainer.initialize()); + ATH_CHECK(m_clusterContainer.initialize()); + ATH_CHECK(m_simDataCollection.initialize()); + + ATH_CHECK(detStore()->retrieve(m_sctHelper, "FaserSCT_ID")); + ATH_CHECK(detStore()->retrieve(m_vetoNuHelper, "VetoNuID")); + ATH_CHECK(detStore()->retrieve(m_vetoHelper, "VetoID")); + ATH_CHECK(detStore()->retrieve(m_triggerHelper, "TriggerID")); + ATH_CHECK(detStore()->retrieve(m_preshowerHelper, "PreshowerID")); + ATH_CHECK(detStore()->retrieve(m_ecalHelper, "EcalID")); + + ATH_CHECK(detStore()->retrieve(m_detMgr, "SCT")); + ATH_CHECK(m_extrapolationTool.retrieve()); + + if (m_useFlukaWeights) + { + m_baseEventCrossSection = (m_flukaCrossSection * kfemtoBarnsPerMilliBarn)/m_flukaCollisions; + } + else if (m_useGenieWeights) + { + m_baseEventCrossSection = 1.0/m_genieLuminosity; + } + else + { + m_baseEventCrossSection = 1.0; + } + + m_tree = new TTree("tree", "tree"); + m_tree->Branch("run_number", &m_run_number, "run_number/I"); + m_tree->Branch("event_number", &m_event_number, "event_number/I"); + + m_tree->Branch("VetoNuPmt0", &m_vetoNu0, "vetoNu0/D"); + m_tree->Branch("VetoNuPmt1", &m_vetoNu1, "vetoNu1/D"); + + m_tree->Branch("VetoPmt00", &m_veto00, "veto00/D"); + m_tree->Branch("VetoPmt01", &m_veto01, "veto01/D"); + m_tree->Branch("VetoUpstream", &m_vetoUpstream, "vetoUpstream/D"); + m_tree->Branch("VetoPmt10", &m_veto10, "veto10/D"); + m_tree->Branch("VetoPmt11", &m_veto11, "veto11/D"); + m_tree->Branch("VetoDownstream", &m_vetoDownstream, "vetoDownstream/D"); + + m_tree->Branch("TriggerPmt00", &m_trigger00, "trigger00/D"); + m_tree->Branch("TriggerPmt01", &m_trigger01, "trigger01/D"); + m_tree->Branch("TriggerPmt10", &m_trigger10, "trigger10/D"); + m_tree->Branch("TriggerPmt11", &m_trigger11, "trigger11/D"); + m_tree->Branch("TriggerTotal", &m_triggerTotal, "triggerTotal/D"); + + m_tree->Branch("PreshowerPmt0", &m_preshower0, "preshower0/D"); + m_tree->Branch("PreshowerPmt1", &m_preshower1, "preshower1/D"); + + m_tree->Branch("EcalPmt00", &m_ecal00, "ecal00/D"); + m_tree->Branch("EcalPmt01", &m_ecal01, "ecal01/D"); + m_tree->Branch("EcalPmt10", &m_ecal10, "ecal10/D"); + m_tree->Branch("EcalPmt11", &m_ecal11, "ecal11/D"); + m_tree->Branch("EcalTotal", &m_ecalTotal, "ecalTotal/D"); + + m_tree->Branch("Clust0", &m_station0Clusters, "clust0/I"); + m_tree->Branch("Clust1", &m_station1Clusters, "clust1/I"); + m_tree->Branch("Clust2", &m_station2Clusters, "clust2/I"); + m_tree->Branch("Clust3", &m_station3Clusters, "clust3/I"); + + m_tree->Branch("x", &m_x, "x/D"); + m_tree->Branch("y", &m_y, "y/D"); + m_tree->Branch("z", &m_z, "z/D"); + m_tree->Branch("px", &m_px, "px/D"); + m_tree->Branch("py", &m_py, "py/D"); + m_tree->Branch("pz", &m_pz, "pz/D"); + m_tree->Branch("p", &m_p, "p/D"); + m_tree->Branch("charge", &m_charge, "charge/I"); + m_tree->Branch("chi2", &m_chi2, "chi2/D"); + m_tree->Branch("ndof", &m_ndof, "ndof/I"); + m_tree->Branch("longTracks", &m_longTracks, 
"longTracks/I"); + m_tree->Branch("pTruthLepton", &m_truthLeptonMomentum, "pTruthLepton/D"); + m_tree->Branch("truthBarcode", &m_truthBarcode, "truthBarcode/I"); + m_tree->Branch("truthPdg", &m_truthPdg, "truthPdg/I"); + m_tree->Branch("CrossSection", &m_crossSection, "crossSection/D"); + + ATH_CHECK(histSvc()->regTree("/HIST2/tree", m_tree)); + + if (m_enforceBlinding) + { + ATH_MSG_INFO("Blinding will be enforced for real data."); + } + else + { + ATH_MSG_INFO("Blinding will NOT be enforced for real data."); + } + + return StatusCode::SUCCESS; +} + + +StatusCode NeutrinoSearchAlg::execute(const EventContext &ctx) const +{ + + clearTree(); + + m_run_number = ctx.eventID().run_number(); + m_event_number = ctx.eventID().event_number(); + + bool realData = true; + + // Work out effective cross section for MC + SG::ReadHandle<xAOD::TruthEventContainer> truthEventContainer { m_truthEventContainer, ctx }; + if (truthEventContainer.isValid() && truthEventContainer->size() > 0) + { + realData = false; + if (m_useFlukaWeights) + { + double flukaWeight = truthEventContainer->at(0)->weights()[0]; + ATH_MSG_ALWAYS("Found fluka weight = " << flukaWeight); + m_crossSection = m_baseEventCrossSection * flukaWeight; + } + else if (m_useGenieWeights) + { + m_crossSection = m_baseEventCrossSection; + } + else + { + ATH_MSG_WARNING("Monte carlo event with no weighting scheme specified. Setting crossSection (weight) to " << m_baseEventCrossSection << " fb."); + m_crossSection = m_baseEventCrossSection; + } + } + + // Find the primary lepton (if any) + SG::ReadHandle<xAOD::TruthParticleContainer> truthParticleContainer { m_truthParticleContainer, ctx }; + if (truthParticleContainer.isValid() && truthParticleContainer->size() > 0) + { + for (auto particle : *truthParticleContainer) + { + if ( particle->absPdgId() == 11 || particle->absPdgId() == 13 || particle->absPdgId() == 15 ) + { + if (particle->status() == 1 && (particle->nParents() == 0 || particle->nParents() == 2) ) + m_truthLeptonMomentum = particle->p4().P(); + + break; + } + } + } + + SG::ReadHandle<xAOD::WaveformHitContainer> vetoNuContainer { m_vetoNuContainer, ctx }; + ATH_CHECK(vetoNuContainer.isValid()); + + // If real data, check for blinding before we do anything else + + bool blinded = realData; + for (auto hit : *vetoNuContainer) + { + if (!waveformHitOK(hit)) continue; + blinded = false; + auto id = hit->identify(); + if (m_vetoNuHelper->plate(id) == 0) + { + m_vetoNu0 += hit->integral(); + } + else if (m_vetoNuHelper->plate(id) == 1) + { + m_vetoNu1 += hit->integral(); + } + else + { + ATH_MSG_FATAL("Invalid VetoNu plate number: " << m_vetoNuHelper->plate(id)); + return StatusCode::FAILURE; + } + } + + if (m_enforceBlinding && blinded) return StatusCode::SUCCESS; + + SG::ReadHandle<xAOD::WaveformHitContainer> vetoContainer { m_vetoContainer, ctx }; + ATH_CHECK(vetoContainer.isValid()); + + SG::ReadHandle<xAOD::WaveformHitContainer> triggerContainer { m_triggerContainer, ctx }; + ATH_CHECK(triggerContainer.isValid()); + + SG::ReadHandle<xAOD::WaveformHitContainer> preshowerContainer { m_preshowerContainer, ctx }; + ATH_CHECK(preshowerContainer.isValid()); + + SG::ReadHandle<xAOD::WaveformHitContainer> ecalContainer { m_ecalContainer, ctx }; + ATH_CHECK(ecalContainer.isValid()); + + for (auto hit : *vetoContainer) + { + if (!waveformHitOK(hit)) continue; + auto id = hit->identify(); + auto station = m_vetoHelper->station(id); + auto plate = m_vetoHelper->plate(id); + if (station == 0) + { + if (plate == 0) + { + m_veto00 += hit->integral(); 
+ m_vetoUpstream += hit->integral(); + } + else if (plate == 1) + { + m_veto01 += hit->integral(); + m_vetoUpstream += hit->integral(); + } + else + { + ATH_MSG_FATAL("Invalid Veto plate number: " << plate); + } + } + else if (station == 1) + { + if (plate == 0) + { + m_veto10 += hit->integral(); + m_vetoDownstream += hit->integral(); + } + else if (plate == 1) + { + m_veto11 += hit->integral(); + m_vetoDownstream += hit->integral(); + } + else + { + ATH_MSG_FATAL("Invalid Veto plate number: " << plate); + } + } + else + { + ATH_MSG_FATAL("Invalid Veto station number: " << station); + return StatusCode::FAILURE; + } + } + + for (auto hit : *triggerContainer) + { + if (!waveformHitOK(hit)) continue; + auto id = hit->identify(); + auto plate = m_triggerHelper->plate(id); + auto pmt = m_triggerHelper->pmt(id); + if (plate == 0) + { + if (pmt == 0) + { + m_trigger00 += hit->integral(); + m_triggerTotal += hit->integral(); + } + else if (pmt == 1) + { + m_trigger01 += hit->integral(); + m_triggerTotal += hit->integral(); + } + else + { + ATH_MSG_FATAL("Invalid Trigger pmt number: " << pmt); + } + } + else if (plate == 1) + { + if (pmt == 0) + { + m_trigger10 += hit->integral(); + m_triggerTotal += hit->integral(); + } + else if (pmt == 1) + { + m_trigger11 += hit->integral(); + m_triggerTotal += hit->integral(); + } + else + { + ATH_MSG_FATAL("Invalid Trigger pmt number: " << pmt); + } + } + else + { + ATH_MSG_FATAL("Invalid Trigger plate number: " << plate); + return StatusCode::FAILURE; + } + } + + for (auto hit : *preshowerContainer) + { + if (!waveformHitOK(hit)) continue; + auto id = hit->identify(); + if (m_preshowerHelper->plate(id) == 0) + { + m_preshower0 += hit->integral(); + } + else if (m_preshowerHelper->plate(id) == 1) + { + m_preshower1 += hit->integral(); + } + else + { + ATH_MSG_FATAL("Invalid Preshower plate number: " << m_preshowerHelper->plate(id)); + return StatusCode::FAILURE; + } + } + + for (auto hit : *ecalContainer) + { + if (!waveformHitOK(hit)) continue; + auto id = hit->identify(); + auto row = m_ecalHelper->row(id); + auto mod = m_ecalHelper->module(id); + if (row == 0) + { + if (mod == 0) + { + m_ecal00 += hit->integral(); + m_ecalTotal += hit->integral(); + } + else if (mod == 1) + { + m_ecal01 += hit->integral(); + m_ecalTotal += hit->integral(); + } + else + { + ATH_MSG_FATAL("Invalid Ecal module number: " << mod); + } + } + else if (row == 1) + { + if (mod == 0) + { + m_ecal10 += hit->integral(); + m_ecalTotal += hit->integral(); + } + else if (mod == 1) + { + m_ecal11 += hit->integral(); + m_ecalTotal += hit->integral(); + } + else + { + ATH_MSG_FATAL("Invalid Ecal module number: " << mod); + } + } + else + { + ATH_MSG_FATAL("Invalid Ecal row number: " << row); + return StatusCode::FAILURE; + } + } + + SG::ReadHandle<Tracker::FaserSCT_ClusterContainer> clusterContainer { m_clusterContainer, ctx }; + ATH_CHECK(clusterContainer.isValid()); + + for (auto collection : *clusterContainer) + { + Identifier id = collection->identify(); + int station = m_sctHelper->station(id); + int clusters = (int) collection->size(); + switch (station) + { + case 0: + m_station0Clusters += clusters; + break; + case 1: + m_station1Clusters += clusters; + break; + case 2: + m_station2Clusters += clusters; + break; + case 3: + m_station3Clusters += clusters; + break; + default: + ATH_MSG_FATAL("Unknown tracker station number " << station); + break; + } + } + + + SG::ReadHandle<TrackCollection> trackCollection {m_trackCollection, ctx}; + ATH_CHECK(trackCollection.isValid()); + + const 
Trk::TrackParameters* candidateParameters {nullptr}; + const Trk::Track* candidateTrack {nullptr}; + + for (const Trk::Track* track : *trackCollection) + { + if (track == nullptr) continue; + std::set<int> stationMap; + std::set<std::pair<int, int>> layerMap; + + // Check for hit in the three downstream stations + for (auto measurement : *(track->measurementsOnTrack())) + { + const Tracker::FaserSCT_ClusterOnTrack* cluster = dynamic_cast<const Tracker::FaserSCT_ClusterOnTrack*>(measurement); + if (cluster != nullptr) + { + Identifier id = cluster->identify(); + int station = m_sctHelper->station(id); + int layer = m_sctHelper->layer(id); + stationMap.emplace(station); + layerMap.emplace(station, layer); + } + } + if (stationMap.count(1) == 0 || stationMap.count(2) == 0 || stationMap.count(3) == 0) continue; + + int nLayers = std::count_if(layerMap.begin(), layerMap.end(), [](std::pair<int,int> p){return p.first != 0;}); + if (nLayers < m_minTrackerLayers) continue; + m_longTracks++; + const Trk::TrackParameters* upstreamParameters {nullptr}; + for (auto params : *(track->trackParameters())) + { + if (upstreamParameters == nullptr || params->position().z() < upstreamParameters->position().z()) upstreamParameters = params; + } + if (candidateParameters == nullptr || upstreamParameters->momentum().mag() > candidateParameters->momentum().mag()) + { + candidateParameters = upstreamParameters; + candidateTrack = track; + m_chi2 = track->fitQuality()->chiSquared(); + m_ndof = track->fitQuality()->numberDoF(); + } + } + + SG::ReadHandle<TrackerSimDataCollection> simDataCollection {m_simDataCollection, ctx}; +// ATH_MSG_INFO("SimData valid? " << simDataCollection.isValid()); + if (candidateTrack != nullptr && simDataCollection.isValid()) + { + std::map<int, float> truthMap; + for (auto measurement : *(candidateTrack->measurementsOnTrack())) + { + const Tracker::FaserSCT_ClusterOnTrack* cluster = dynamic_cast<const Tracker::FaserSCT_ClusterOnTrack*>(measurement); + if (cluster != nullptr) + { + // ATH_MSG_INFO("ClusterOnTrack is OK"); + cluster->dump(msg()); + +// Hack to work around issue with cluster navigation + + auto idRDO = cluster->identify(); + + if (simDataCollection->count(idRDO) > 0) + { + // ATH_MSG_INFO("rdo entry found"); + const auto& simdata = simDataCollection->find(idRDO)->second; + const auto& deposits = simdata.getdeposits(); + //loop through deposits and record contributions + HepMcParticleLink primary{}; + for( const auto& depositPair : deposits) + { + // ATH_MSG_INFO("Deposit found"); + float eDep = depositPair.second; + int barcode = depositPair.first->barcode(); + // if( depositPair.second > highestDep) + // { + // highestDep = depositPair.second; + // barcode = depositPair.first->barcode(); + // primary = depositPair.first; + // depositPair.first->print(std::cout); + // ATH_MSG_INFO("pdg id "<<depositPair.first->pdg_id()); + // } + if (truthMap.count(barcode) > 0) + { + truthMap[barcode] += eDep; + } + else + { + truthMap[barcode] = eDep; + } + } + } + + + + + // // const Tracker::FaserSCT_Cluster* origCluster = dynamic_cast<const Tracker::FaserSCT_Cluster*>(cluster->prepRawData()); + // auto origCluster = cluster->prepRawData(); + // if (origCluster != nullptr) + // { + // ATH_MSG_INFO("Orig Cluster is OK"); + // auto rdoList = origCluster->rdoList(); + // for (auto idRDO : rdoList) + // { + // ATH_MSG_INFO("rdoList not empty"); + // if (simDataCollection->count(idRDO) > 0) + // { + // ATH_MSG_INFO("rdo entry found"); + // const auto& simdata = 
simDataCollection->find(idRDO)->second; + // const auto& deposits = simdata.getdeposits(); + // //loop through deposits and record contributions + // HepMcParticleLink primary{}; + // for( const auto& depositPair : deposits) + // { + // ATH_MSG_INFO("Deposit found"); + // float eDep = depositPair.second; + // int barcode = depositPair.first->barcode(); + // // if( depositPair.second > highestDep) + // // { + // // highestDep = depositPair.second; + // // barcode = depositPair.first->barcode(); + // // primary = depositPair.first; + // // depositPair.first->print(std::cout); + // // ATH_MSG_INFO("pdg id "<<depositPair.first->pdg_id()); + // // } + // if (truthMap.count(barcode) > 0) + // { + // truthMap[barcode] += eDep; + // } + // else + // { + // truthMap[barcode] = eDep; + // } + // } + // } + // } + // } + } + } + std::vector<std::pair<int, float>> truth(truthMap.begin(), truthMap.end()); + std::sort(truth.begin(), truth.end(), [](auto v1, auto v2) { return v1.second > v2.second; }); + if (truth.size()>0) ATH_MSG_ALWAYS("Selected track truth info:"); + for (auto v : truth) + { + auto truthParticle = (*(std::find_if(truthParticleContainer->cbegin(), truthParticleContainer->cend(), [v](const xAOD::TruthParticle* p){ return p->barcode() == v.first; }))); + if (m_truthPdg == 0) m_truthPdg = truthParticle->pdgId(); + if (m_truthBarcode == 0) m_truthBarcode = v.first; + ATH_MSG_ALWAYS("truth info: barcode = " << v.first << " ( " << truthParticle->p4().P()/1000 << " GeV/c, Id code = " << truthParticle->pdgId() << ") -> deposited energy: " << v.second/1000); + } + } + + if (candidateParameters != nullptr) + { + m_x = candidateParameters->position().x(); + m_y = candidateParameters->position().y(); + m_z = candidateParameters->position().z(); + m_px = candidateParameters->momentum().x(); + m_py = candidateParameters->momentum().y(); + m_pz = candidateParameters->momentum().z(); + m_p = sqrt(m_px * m_px + m_py * m_py + m_pz * m_pz); + m_charge = (int) candidateParameters->charge(); + } + + // Here we apply the signal selection + // Very simple/unrealistic to start + if (m_vetoUpstream == 0 || m_vetoDownstream == 0 || + m_triggerTotal == 0 || + m_preshower0 == 0 || m_preshower1 == 0 || + // m_ecalTotal == 0 || + candidateParameters == nullptr) + return StatusCode::SUCCESS; + + m_tree->Fill(); + + return StatusCode::SUCCESS; +} + + +StatusCode NeutrinoSearchAlg::finalize() +{ + return StatusCode::SUCCESS; +} + +bool NeutrinoSearchAlg::waveformHitOK(const xAOD::WaveformHit* hit) const +{ + if (hit->status_bit(xAOD::WaveformStatus::THRESHOLD_FAILED) || hit->status_bit(xAOD::WaveformStatus::SECONDARY)) return false; + return true; +} + +void +NeutrinoSearchAlg::clearTree() const +{ + m_run_number = 0; + m_event_number = 0; + m_vetoNu0 = 0; + m_vetoNu1 = 0; + m_veto00 = 0; + m_veto01 = 0; + m_veto10 = 0; + m_veto11 = 0; + m_vetoUpstream = 0; + m_vetoDownstream = 0; + m_trigger00 = 0; + m_trigger01 = 0; + m_trigger10 = 0; + m_trigger11 = 0; + m_triggerTotal = 0; + m_preshower0 = 0; + m_preshower1 = 0; + m_ecal00 = 0; + m_ecal01 = 0; + m_ecal10 = 0; + m_ecal11 = 0; + m_ecalTotal = 0; + m_station0Clusters = 0; + m_station1Clusters = 0; + m_station2Clusters = 0; + m_station3Clusters = 0; + m_crossSection = 0; + m_chi2 = 0; + m_ndof = 0; + m_px = 0; + m_py = 0; + m_pz = 0; + m_p = 0; + m_charge = 0; + m_x = 0; + m_y = 0; + m_z = 0; + m_longTracks = 0; + m_truthLeptonMomentum = 0; + m_truthBarcode = 0; + m_truthPdg = 0; +} \ No newline at end of file diff --git 
a/PhysicsAnalysis/NeutrinoSearch/src/NeutrinoSearchAlg.h b/PhysicsAnalysis/NeutrinoSearch/src/NeutrinoSearchAlg.h new file mode 100644 index 0000000000000000000000000000000000000000..d1732eb1238a15bb7b2fb397869b3c4486aa50b3 --- /dev/null +++ b/PhysicsAnalysis/NeutrinoSearch/src/NeutrinoSearchAlg.h @@ -0,0 +1,136 @@ +#ifndef NEUTRINOSEARCH_NEUTRINOSEARCHALG_H +#define NEUTRINOSEARCH_NEUTRINOSEARCHALG_H + +#include "AthenaBaseComps/AthReentrantAlgorithm.h" +#include "AthenaBaseComps/AthHistogramming.h" +#include "TrkTrack/TrackCollection.h" +#include "xAODFaserWaveform/WaveformHitContainer.h" +#include "xAODFaserWaveform/WaveformHit.h" +#include "xAODTruth/TruthEventContainer.h" +#include "xAODTruth/TruthParticleContainer.h" +#include "TrackerPrepRawData/FaserSCT_ClusterContainer.h" +#include "TrackerSimData/TrackerSimDataCollection.h" +#include "FaserActsGeometryInterfaces/IFaserActsExtrapolationTool.h" + + +class TTree; +class FaserSCT_ID; +class VetoNuID; +class VetoID; +class TriggerID; +class PreshowerID; +class EcalID; +namespace TrackerDD +{ + class SCT_DetectorManager; +} + +class NeutrinoSearchAlg : public AthReentrantAlgorithm, AthHistogramming { +public: + NeutrinoSearchAlg(const std::string &name, ISvcLocator *pSvcLocator); + virtual ~NeutrinoSearchAlg() = default; + virtual StatusCode initialize() override; + virtual StatusCode execute(const EventContext &ctx) const override; + virtual StatusCode finalize() override; + const ServiceHandle <ITHistSvc> &histSvc() const; + +private: + + bool waveformHitOK(const xAOD::WaveformHit* hit) const; + void clearTree() const; + + ServiceHandle <ITHistSvc> m_histSvc; + + SG::ReadHandleKey<xAOD::TruthEventContainer> m_truthEventContainer { this, "EventContainer", "TruthEvents", "Truth event container name." }; + SG::ReadHandleKey<xAOD::TruthParticleContainer> m_truthParticleContainer { this, "ParticleContainer", "TruthParticles", "Truth particle container name." 
}; + SG::ReadHandleKey<TrackerSimDataCollection> m_simDataCollection {this, "TrackerSimDataCollection", "SCT_SDO_Map"}; + + SG::ReadHandleKey<TrackCollection> m_trackCollection { this, "TrackCollection", "CKFTrackCollection", "Input track collection name" }; + SG::ReadHandleKey<xAOD::WaveformHitContainer> m_vetoNuContainer { this, "VetoNuContainer", "VetoNuWaveformHits", "VetoNu hit container name" }; + SG::ReadHandleKey<xAOD::WaveformHitContainer> m_vetoContainer { this, "VetoContainer", "VetoWaveformHits", "Veto hit container name" }; + SG::ReadHandleKey<xAOD::WaveformHitContainer> m_triggerContainer { this, "TriggerContainer", "TriggerWaveformHits", "Trigger hit container name" }; + SG::ReadHandleKey<xAOD::WaveformHitContainer> m_preshowerContainer { this, "PreshowerContainer", "PreshowerWaveformHits", "Preshower hit container name" }; + SG::ReadHandleKey<xAOD::WaveformHitContainer> m_ecalContainer { this, "EcalContainer", "CaloWaveformHits", "Ecal hit container name" }; + SG::ReadHandleKey<Tracker::FaserSCT_ClusterContainer> m_clusterContainer { this, "ClusterContainer", "SCT_ClusterContainer", "Tracker cluster container name" }; + + ToolHandle<IFaserActsExtrapolationTool> m_extrapolationTool { this, "ExtrapolationTool", "FaserActsExtrapolationTool" }; + const TrackerDD::SCT_DetectorManager* m_detMgr {nullptr}; + + const FaserSCT_ID* m_sctHelper; + const VetoNuID* m_vetoNuHelper; + const VetoID* m_vetoHelper; + const TriggerID* m_triggerHelper; + const PreshowerID* m_preshowerHelper; + const EcalID* m_ecalHelper; + + // TODO: use realistic thresholds for MIP +// DoubleProperty m_vetoNuThreshold { this, "VetoNuThreshold", 0, "Threshold for VetoNu pmts" }; +// DoubleProperty m_vetoThreshold { this, "VetoThreshold", 0, "Threshold for Veto pmts" }; +// DoubleProperty m_triggerThreshold { this, "TriggerThreshold", 0, "Threshold for Trigger pmts" }; +// DoubleProperty m_preshowerThreshold { this, "PreshowerThreshold", 0, "Threshold for Preshower pmts" }; +// DoubleProperty m_ecalThreshold { this, "EcalThreshold", 0, "Threshold for Ecal pmts" }; + IntegerProperty m_minTrackerLayers { this, "MinTrackerLayers", 7, "Minimum number of layers with hits on track" }; + + BooleanProperty m_useFlukaWeights { this, "UseFlukaWeights", false, "Flag to weight events according to value stored in HepMC::GenEvent" }; + BooleanProperty m_useGenieWeights { this, "UseGenieWeights", false, "Flag to weight events according to Genie luminosity" }; + IntegerProperty m_flukaCollisions { this, "FlukaCollisions", 137130000, "Number of proton-proton collisions in FLUKA sample." }; + DoubleProperty m_flukaCrossSection { this, "FlukaCrossSection", 80.0, "Fluka p-p inelastic cross-section in millibarns." }; + DoubleProperty m_genieLuminosity { this, "GenieLuminosity", 150.0, "Genie luminosity in inverse fb." }; + +// BooleanProperty m_enforceBlinding { this, "EnforceBlinding", true, "Ignore data events with no VetoNu signals." 
}; + const bool m_enforceBlinding {true}; + + double m_baseEventCrossSection {1.0}; + const double kfemtoBarnsPerMilliBarn {1.0e12}; + + mutable TTree* m_tree; + mutable unsigned int m_run_number; + mutable unsigned int m_event_number; + mutable double m_vetoNu0; + mutable double m_vetoNu1; + mutable double m_veto00; + mutable double m_veto01; + mutable double m_vetoUpstream; + mutable double m_veto10; + mutable double m_veto11; + mutable double m_vetoDownstream; + mutable double m_trigger00; + mutable double m_trigger01; + mutable double m_trigger10; + mutable double m_trigger11; + mutable double m_triggerTotal; + mutable double m_preshower0; + mutable double m_preshower1; + mutable double m_ecal00; + mutable double m_ecal01; + mutable double m_ecal10; + mutable double m_ecal11; + mutable double m_ecalTotal; + mutable int m_station0Clusters; + mutable int m_station1Clusters; + mutable int m_station2Clusters; + mutable int m_station3Clusters; + + mutable double m_x; + mutable double m_y; + mutable double m_z; + mutable double m_px; + mutable double m_py; + mutable double m_pz; + mutable double m_p; + mutable int m_charge; + mutable double m_chi2; + mutable int m_ndof; + mutable int m_longTracks; + mutable double m_truthLeptonMomentum; + mutable int m_truthBarcode; + mutable int m_truthPdg; + mutable double m_crossSection; + +}; + +inline const ServiceHandle <ITHistSvc> &NeutrinoSearchAlg::histSvc() const { + return m_histSvc; +} + +#endif // NEUTRINOSEARCH_NEUTRINOSEARCHALG_H diff --git a/PhysicsAnalysis/NeutrinoSearch/src/component/NeutrinoSearch_entries.cxx b/PhysicsAnalysis/NeutrinoSearch/src/component/NeutrinoSearch_entries.cxx new file mode 100644 index 0000000000000000000000000000000000000000..d47072b60eb09ca5e38a86763f85ff0c5bc36e94 --- /dev/null +++ b/PhysicsAnalysis/NeutrinoSearch/src/component/NeutrinoSearch_entries.cxx @@ -0,0 +1,3 @@ +#include "../NeutrinoSearchAlg.h" + +DECLARE_COMPONENT(NeutrinoSearchAlg) \ No newline at end of file diff --git a/Scintillator/ScintDigiAlgs/python/ScintDigiAlgsConfig.py b/Scintillator/ScintDigiAlgs/python/ScintDigiAlgsConfig.py index ddf3074618aff121e704b2214309a14952436837..407ab913201a42845581e5543e1455b7b89c5867 100644 --- a/Scintillator/ScintDigiAlgs/python/ScintDigiAlgsConfig.py +++ b/Scintillator/ScintDigiAlgs/python/ScintDigiAlgsConfig.py @@ -11,19 +11,21 @@ from WaveformConditionsTools.WaveformCableMappingConfig import WaveformCableMapp # Crystalball function Parameters estimated from Deion's slides uploaded at # https://indico.cern.ch/event/1099652/contributions/4626975/attachments/2352595/4013927/Faser-Physics-run3933-plots.pdf (20/01/2022) # Parameters are per scintillator source, but not per channel. 
+# Updated amplitudes (norm) to match testbeam response +# Make everything except VetoNu look like the preshower dict_CB_param = {} -dict_CB_param["Trigger"]=dict(CB_alpha=-0.38, CB_n=25, CB_mean=815, CB_sigma=7.7, CB_norm = 500 ) -dict_CB_param["Timing"]=dict(CB_alpha=-0.32, CB_n=65, CB_mean=846, CB_sigma=5.3, CB_norm = 500) # copy from Preshower; Timing was not in TestBeam -dict_CB_param["Veto"]=dict(CB_alpha=-0.38, CB_n=25, CB_mean=815, CB_sigma=7.7, CB_norm = 1000) # copy from Trigger; Veto was not in TestBeam, but in sim "Veto" is the TestBeam Trigger component -dict_CB_param["VetoNu"]=dict(CB_alpha=-0.38, CB_n=25, CB_mean=815, CB_sigma=7.7, CB_norm = 1000) # copy from Trigger; Veto was not in TestBeam, but in sim "Veto" is the TestBeam Trigger component -dict_CB_param["Preshower"]=dict(CB_alpha=-0.32, CB_n=65, CB_mean=846, CB_sigma=5.3, CB_norm = 500) +dict_CB_param["Trigger"]=dict(CB_alpha=-0.424, CB_n=6.14, CB_mean=815, CB_sigma=3.21, CB_norm = 4240) +dict_CB_param["Timing"] =dict(CB_alpha=-0.424, CB_n=6.14, CB_mean=846, CB_sigma=3.21, CB_norm = 4240) +dict_CB_param["Veto"] =dict(CB_alpha=-0.32, CB_n=9.0, CB_mean=815, CB_sigma=3.35, CB_norm = 6840) +dict_CB_param["VetoNu"] =dict(CB_alpha=-0.28, CB_n=1000, CB_mean=815, CB_sigma=5.00, CB_norm = 7040) +dict_CB_param["Preshower"]=dict(CB_alpha=-0.32, CB_n=1000, CB_mean=846, CB_sigma=4.0, CB_norm = 400) dict_baseline_params = { - "Trigger" : {"mean" : 15000, "rms" : 3}, - "Timing" : {"mean" : 15000, "rms" : 3}, - "Veto" : {"mean" : 15000, "rms" : 3}, - "VetoNu" : {"mean" : 15000, "rms" : 3}, - "Preshower" : {"mean" : 15000, "rms" : 3}, + "Trigger" : {"mean" : 15650, "rms" : 3}, + "Timing" : {"mean" : 15650, "rms" : 3}, + "Veto" : {"mean" : 15650, "rms" : 3}, + "VetoNu" : {"mean" : 15650, "rms" : 3}, + "Preshower" : {"mean" : 15650, "rms" : 3}, } # One stop shopping for normal FASER data @@ -34,11 +36,15 @@ def ScintWaveformDigitizationCfg(flags): if not flags.Input.isMC: return acc - if "TB" not in flags.GeoModel.FaserVersion: - acc.merge(ScintWaveformDigiCfg(flags, "TimingWaveformDigiAlg", "Trigger")) - acc.merge(ScintWaveformDigiCfg(flags, "VetoWaveformDigiAlg", "Veto")) - acc.merge(ScintWaveformDigiCfg(flags, "VetoNuWaveformDigiAlg", "VetoNu")) - acc.merge(ScintWaveformDigiCfg(flags, "PreshowerWaveformDigiAlg", "Preshower")) + if "TB" in flags.GeoModel.FaserVersion: + acc.merge(ScintWaveformDigiCfg(flags, "VetoWaveformDigiAlg", "Veto")) + acc.merge(ScintWaveformDigiCfg(flags, "PreshowerWaveformDigiAlg", "Preshower")) + else: + acc.merge(ScintWaveformDigiCfg(flags, "TriggerWaveformDigiAlg", "Trigger")) + acc.merge(ScintWaveformDigiCfg(flags, "VetoWaveformDigiAlg", "Veto")) + acc.merge(ScintWaveformDigiCfg(flags, "VetoNuWaveformDigiAlg", "VetoNu")) + acc.merge(ScintWaveformDigiCfg(flags, "PreshowerWaveformDigiAlg", "Preshower")) + acc.merge(ScintWaveformDigitizationOutputCfg(flags)) acc.merge(WaveformCableMappingCfg(flags)) return acc @@ -55,6 +61,11 @@ def ScintWaveformDigiCfg(flags, name="ScintWaveformDigiAlg", source="", **kwargs kwargs.setdefault("WaveformContainerKey", source+"Waveforms") digiAlg = CompFactory.ScintWaveformDigiAlg(name, **kwargs) + + if "TB" in flags.GeoModel.FaserVersion and source == "Veto": + # The testbeam counters were actually VetoNu, so use those parameters + source = "VetoNu" + digiAlg.CB_alpha = dict_CB_param[source]["CB_alpha"] digiAlg.CB_n = dict_CB_param[source]["CB_n"] digiAlg.CB_mean = dict_CB_param[source]["CB_mean"] diff --git
a/Tracker/TrackerConditions/FaserSCT_ConditionsData/data/BField_DataConditions.py b/Tracker/TrackerConditions/FaserSCT_ConditionsData/data/BField_DataConditions.py new file mode 100755 index 0000000000000000000000000000000000000000..94846ee80dedb41dbc2354a6662873203aad9cb6 --- /dev/null +++ b/Tracker/TrackerConditions/FaserSCT_ConditionsData/data/BField_DataConditions.py @@ -0,0 +1,85 @@ +#!/bin/env python + +# Use this to add a field map to the CONDBR3 database for real data +# Copied the result from the OFLP200 DB +# Note that the testbeam turns off the field by setting scale = 0 +description = '<timeStamp>run-lumi</timeStamp><addrHeader><address_header clid="1238547719" service_type="71" /></addrHeader><typeName>CondAttrListCollection</typeName>' + +descriptionDCS = '<timeStamp>time</timeStamp><addrHeader><address_header service_type="71" clid="1238547719" /></addrHeader><typeName>CondAttrListCollection</typeName><cache>600</cache>' + +descriptionAlign = '<timeStamp>run-lumi</timeStamp><addrHeader><address_header service_type="256" clid="1170039409" /></addrHeader><typeName>AlignableTransformContainer</typeName>' + +import sys +from PyCool import cool, coral +from CoolConvUtilities.AtlCoolLib import indirectOpen + +dbSvc = cool.DatabaseSvcFactory.databaseService() +connectString = 'sqlite://;schema=ALLP200.db;dbname=CONDBR3' + +print('generating field database') +#dbSvc.dropDatabase( connectString ) +try: + # Open existing instead? + print('Try indirectOpen') + db = indirectOpen( connectString, readOnly=False ) +except Exception as e: + print(e) + print('Problem opening DB, create instead') + db = dbSvc.createDatabase( connectString ) + +glob = db.createFolderSet("/GLOBAL") +glob_bfield = db.createFolderSet("/GLOBAL/BField") + +glob_bfield.createTagRelation("GLOBAL-01", "GLOBAL-BField-01") +glob.createTagRelation("OFLCOND-FASER-01", "GLOBAL-01") + +glob_bfield.createTagRelation("GLOBAL-02", "GLOBAL-BField-02") +glob.createTagRelation("OFLCOND-FASER-02", "GLOBAL-02") + +glob_bfield.createTagRelation("GLOBAL-TB00", "GLOBAL-BField-TB00") +glob.createTagRelation("OFLCOND-FASER-TB00", "GLOBAL-TB00") + +mapSpec = cool.RecordSpecification() +mapSpec.extend( 'FieldType', cool.StorageType.String4k ) +mapSpec.extend( 'MapFileName', cool.StorageType.String4k ) + +mapRecord = cool.Record(mapSpec) +mapRecord['FieldType'] = "GlobalMap" +mapRecord['MapFileName'] = "file:MagneticFieldMaps/FaserFieldTable.root" + +mapFolderSpec = cool.FolderSpecification(cool.FolderVersioning.MULTI_VERSION, mapSpec) +mapFolder = db.createFolder('/GLOBAL/BField/Maps', mapFolderSpec, descriptionDCS, True ) + +mapFolder.storeObject( cool.ValidityKeyMin, cool.ValidityKeyMax, mapRecord, 1, "GLOBAL-BField-Maps-01", True ) +mapFolder.createTagRelation("GLOBAL-BField-01", "GLOBAL-BField-Maps-01") + +mapFolder.storeObject( cool.ValidityKeyMin, cool.ValidityKeyMax, mapRecord, 1, "GLOBAL-BField-Maps-02", True ) +mapFolder.createTagRelation("GLOBAL-BField-02", "GLOBAL-BField-Maps-02") + +mapFolder.storeObject( cool.ValidityKeyMin, cool.ValidityKeyMax, mapRecord, 1, "GLOBAL-BField-Maps-TB00", True ) +mapFolder.createTagRelation("GLOBAL-BField-TB00", "GLOBAL-BField-Maps-TB00") + +scaleSpec = cool.RecordSpecification() +scaleSpec.extend( 'value', cool.StorageType.Float ) + +scaleRecord = cool.Record(scaleSpec) +scaleRecord['value'] = 1.0 + +scaleFolderSpec = cool.FolderSpecification(cool.FolderVersioning.MULTI_VERSION, scaleSpec) +scaleFolder = db.createFolder('/GLOBAL/BField/Scales', scaleFolderSpec, descriptionDCS, True ) + +# 
Channel names don't seem to be handled properly by Athena +scaleFolder.createChannel( 1, "Dipole_Scale" ) +scaleFolder.storeObject( cool.ValidityKeyMin, cool.ValidityKeyMax, scaleRecord, 1, "GLOBAL-BField-Scale-01", True ) +scaleFolder.createTagRelation("GLOBAL-BField-01", "GLOBAL-BField-Scale-01") + + +scaleFolder.storeObject( cool.ValidityKeyMin, cool.ValidityKeyMax, scaleRecord, 1, "GLOBAL-BField-Scale-02", True ) +scaleFolder.createTagRelation("GLOBAL-BField-02", "GLOBAL-BField-Scale-02") + + +scaleRecord['value'] = 0.0 +scaleFolder.storeObject( cool.ValidityKeyMin, cool.ValidityKeyMax, scaleRecord, 1, "GLOBAL-BField-Scale-TB00", True ) +scaleFolder.createTagRelation("GLOBAL-BField-TB00", "GLOBAL-BField-Scale-TB00") + +db.closeDatabase() diff --git a/Tracker/TrackerConditions/FaserSCT_ConditionsTools/python/FaserSCT_DCSConditionsConfig.py b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/python/FaserSCT_DCSConditionsConfig.py index cc1ddf6c99661d1fdbace11d74e7c3785e90be9e..42b71a58c0ba31f95f196a509134218e01e7e28c 100644 --- a/Tracker/TrackerConditions/FaserSCT_ConditionsTools/python/FaserSCT_DCSConditionsConfig.py +++ b/Tracker/TrackerConditions/FaserSCT_ConditionsTools/python/FaserSCT_DCSConditionsConfig.py @@ -29,7 +29,7 @@ def FaserSCT_DCSConditionsCfg(flags, name="TrackerSCT_DCSConditions", **kwargs): tempFolder = kwargs.get("tempFolder", "/SCT/DCS/MODTEMP") stateFolder = kwargs.get("stateFolder", "/SCT/DCS/CHANSTAT") if tool.ReadAllDBFolders == tool.ReturnHVTemp: - acc.merge(addFolders(flags, stateFolder, dbInstance, className="CondAttrListCollection")) + acc.merge(addFolders(flags, stateFolder, dbInstance, className="CondAttrListCollection",db="OFLP200")) # algo statArgs = { "name": name + "StatCondAlg", @@ -40,7 +40,7 @@ def FaserSCT_DCSConditionsCfg(flags, name="TrackerSCT_DCSConditions", **kwargs): statAlg = FaserSCT_DCSConditionsStatCondAlg(**statArgs) acc.addCondAlgo(statAlg) if tool.ReturnHVTemp: - acc.merge(addFolders(flags, [hvFolder, tempFolder], dbInstance, className="CondAttrListCollection")) + acc.merge(addFolders(flags, [hvFolder, tempFolder], dbInstance, className="CondAttrListCollection",db="OFLP200")) hvAlg = FaserSCT_DCSConditionsHVCondAlg(name=name + "HVCondAlg", ReadKey=hvFolder) acc.addCondAlgo(hvAlg) tempAlg = FaserSCT_DCSConditionsTempCondAlg(name=name + "TempCondAlg", ReadKey=tempFolder) diff --git a/Tracker/TrackerDetDescr/FaserSCT_GeoModel/python/FaserSCT_GeoModelConfig.py b/Tracker/TrackerDetDescr/FaserSCT_GeoModel/python/FaserSCT_GeoModelConfig.py index 26dff64f141ad9f22ff4ac430ef2fbdf98065f7e..038c8c38bb3c6d0daed962726f310867873a4c02 100644 --- a/Tracker/TrackerDetDescr/FaserSCT_GeoModel/python/FaserSCT_GeoModelConfig.py +++ b/Tracker/TrackerDetDescr/FaserSCT_GeoModel/python/FaserSCT_GeoModelConfig.py @@ -2,7 +2,7 @@ from AthenaConfiguration.ComponentFactory import CompFactory from AthenaConfiguration.Enums import ProductionStep -from IOVDbSvc.IOVDbSvcConfig import addFoldersSplitOnline +from IOVDbSvc.IOVDbSvcConfig import addFolders, addFoldersSplitOnline def FaserSCT_GeometryCfg( flags ): from FaserGeoModel.GeoModelConfig import GeoModelCfg @@ -33,10 +33,11 @@ def FaserSCT_GeometryCfg( flags ): # acc.merge(addFoldersSplitOnline(flags,"INDET","/Indet/Onl/AlignL3","/Indet/AlignL3",className="AlignableTransformContainer")) print("FaserSCT dynamic align flag is not supported!") else: + print("Override Alignment dbname to OFLP200, fix this when alignment available in CONDBR3") if flags.Common.Project != "AthSimulation" and (flags.Common.ProductionStep 
!= ProductionStep.Simulation or flags.Overlay.DataOverlay): - acc.merge(addFoldersSplitOnline(flags,"SCT","/Tracker/Onl/Align","/Tracker/Align",className="AlignableTransformContainer")) + acc.merge(addFolders(flags,"/Tracker/Align", "SCT_OFL", className="AlignableTransformContainer", db="OFLP200")) else: - acc.merge(addFoldersSplitOnline(flags,"SCT","/Tracker/Onl/Align","/Tracker/Align")) + acc.merge(addFolders(flags, "/Tracker/Align", "SCT_OFL", db="OFLP200")) if flags.Common.Project != "AthSimulation": # Protection for AthSimulation builds if flags.Common.ProductionStep != ProductionStep.Simulation or flags.Overlay.DataOverlay: FaserSCT_AlignCondAlg = CompFactory.FaserSCT_AlignCondAlg diff --git a/Tracker/TrackerEventCnv/TrackerEventCnvTools/TrackerEventCnvTools/TrackerEventCnvTool.h b/Tracker/TrackerEventCnv/TrackerEventCnvTools/TrackerEventCnvTools/TrackerEventCnvTool.h index 29ef80259242bde365ddf83b6ec8bcff3ada41ac..f56577388e62f131d349f5e40a12383cf87e8b71 100644 --- a/Tracker/TrackerEventCnv/TrackerEventCnvTools/TrackerEventCnvTools/TrackerEventCnvTool.h +++ b/Tracker/TrackerEventCnv/TrackerEventCnvTools/TrackerEventCnvTools/TrackerEventCnvTool.h @@ -89,7 +89,7 @@ class TrackerEventCnvTool : public extends<AthAlgTool, Trk::ITrkEventCnvTool> // added to check TRT existence (SLHC geo check) const IdDictManager* m_idDictMgr; - SG::ReadHandleKey<FaserSCT_ClusterContainer> m_sctClusContName {this, "SCT_ClusterContainer", "SCT_Clusters", "SCT Cluster container name"}; //!< location of container of sct clusters + SG::ReadHandleKey<FaserSCT_ClusterContainer> m_sctClusContName {this, "SCT_ClusterContainer", "SCT_ClusterContainer", "SCT Cluster container name"}; //!< location of container of sct clusters SG::ReadCondHandleKey<TrackerDD::SiDetectorElementCollection> m_SCTDetEleCollKey{this, "SCTDetEleCollKey", "SCT_DetectorElementCollection", "Key of SiDetectorElementCollection for SCT"}; }; diff --git a/Tracker/TrackerEventCnv/TrackerEventCnvTools/src/TrackerEventCnvTool.cxx b/Tracker/TrackerEventCnv/TrackerEventCnvTools/src/TrackerEventCnvTool.cxx index 92d62c99790494ae1c0a2b0cb3b62753fe0386fe..1eb3b38a4270226687a0eb12b700d8be54dde0a4 100644 --- a/Tracker/TrackerEventCnv/TrackerEventCnvTools/src/TrackerEventCnvTool.cxx +++ b/Tracker/TrackerEventCnv/TrackerEventCnvTools/src/TrackerEventCnvTool.cxx @@ -27,7 +27,7 @@ Tracker::TrackerEventCnvTool::TrackerEventCnvTool(const std::string& t, const IInterface* p ) : base_class(t,n,p), - m_setPrepRawDataLink(false), + m_setPrepRawDataLink(true), m_IDHelper(nullptr), m_SCTHelper(nullptr), m_idDictMgr(nullptr) diff --git a/Tracker/TrackerRecEvent/TrackerRIO_OnTrack/src/FaserSCT_ClusterOnTrack.cxx b/Tracker/TrackerRecEvent/TrackerRIO_OnTrack/src/FaserSCT_ClusterOnTrack.cxx index 5567e5cf9ab507ce8e7f076917055adef219e8e8..ba438dd9eef7845724e259b0a2e387a598df548b 100644 --- a/Tracker/TrackerRecEvent/TrackerRIO_OnTrack/src/FaserSCT_ClusterOnTrack.cxx +++ b/Tracker/TrackerRecEvent/TrackerRIO_OnTrack/src/FaserSCT_ClusterOnTrack.cxx @@ -87,8 +87,10 @@ const Trk::Surface& Tracker::FaserSCT_ClusterOnTrack::associatedSurface() const return ( detectorElement()->surface()); } -void Tracker::FaserSCT_ClusterOnTrack::setValues(const Trk::TrkDetElementBase* detEl, const Trk::PrepRawData* ) +void Tracker::FaserSCT_ClusterOnTrack::setValues(const Trk::TrkDetElementBase* detEl, const Trk::PrepRawData* prepRawData) { + const auto *cluster = dynamic_cast<const Tracker::FaserSCT_Cluster*>(prepRawData); + m_rio.setElement(cluster); m_detEl = dynamic_cast< const 
TrackerDD::SiDetectorElement* >(detEl); if (m_detEl) { // Set global position after setting the detector element diff --git a/Tracking/Acts/FaserActsGeometry/FaserActsGeometry/FaserActsLayerBuilder.h b/Tracking/Acts/FaserActsGeometry/FaserActsGeometry/FaserActsLayerBuilder.h index 9ab2ae89793253a062a63b242eb5ce4fccefae98..132fd1ca2069375eceb43531071b3ff5ebd2dbdf 100644 --- a/Tracking/Acts/FaserActsGeometry/FaserActsGeometry/FaserActsLayerBuilder.h +++ b/Tracking/Acts/FaserActsGeometry/FaserActsGeometry/FaserActsLayerBuilder.h @@ -117,7 +117,7 @@ private: Config m_cfg; Acts::Vector3 m_worldDimensions = { 400.0_mm, 400.0_mm, 8000.0_mm }; Acts::Vector3 m_worldCenter = {0.0, 0.0, 0.0}; - Acts::Vector3 m_trackerDimensions = { 400.0_mm, 400.0_mm, 50.0_mm }; + Acts::Vector3 m_trackerDimensions = { 400.0_mm, 400.0_mm, 100.0_mm }; /// Private access to the logger const Acts::Logger& diff --git a/Tracking/Acts/FaserActsKalmanFilter/CMakeLists.txt b/Tracking/Acts/FaserActsKalmanFilter/CMakeLists.txt index 6e9c7dc2f58cdd6a172e9f357915e7d18ac67ece..9377c4716810301c16fe7ca606ba31e6f5ba318c 100755 --- a/Tracking/Acts/FaserActsKalmanFilter/CMakeLists.txt +++ b/Tracking/Acts/FaserActsKalmanFilter/CMakeLists.txt @@ -71,6 +71,8 @@ atlas_add_component(FaserActsKalmanFilter src/CircleFit.cxx src/CircleFitTrackSeedTool.cxx src/CKF2.cxx + src/CreateTrkTrackTool.h + src/CreateTrkTrackTool.cxx # src/ClusterTrackSeedTool.cxx src/CombinatorialKalmanFilterAlg.cxx src/EffPlotTool.cxx diff --git a/Tracking/Acts/FaserActsKalmanFilter/FaserActsKalmanFilter/ITrackSeedTool.h b/Tracking/Acts/FaserActsKalmanFilter/FaserActsKalmanFilter/ITrackSeedTool.h index 238fbb13d82ab4c6a784ed57acedb648334c716a..1ef1a7921bdaa55381ee9487c32b7f58bca3e05c 100644 --- a/Tracking/Acts/FaserActsKalmanFilter/FaserActsKalmanFilter/ITrackSeedTool.h +++ b/Tracking/Acts/FaserActsKalmanFilter/FaserActsKalmanFilter/ITrackSeedTool.h @@ -20,7 +20,7 @@ class ITrackSeedTool : virtual public IAlgTool { public: DeclareInterfaceID(ITrackSeedTool, 1, 0); - virtual StatusCode run() = 0; + virtual StatusCode run(std::vector<int> maskedLayers = {}) = 0; virtual const std::shared_ptr<std::vector<Acts::CurvilinearTrackParameters>> initialTrackParameters() const = 0; virtual const std::shared_ptr<const Acts::Surface> initialSurface() const = 0; virtual const std::shared_ptr<std::vector<IndexSourceLink>> sourceLinks() const = 0; diff --git a/Tracking/Acts/FaserActsKalmanFilter/python/CKF2Config.py b/Tracking/Acts/FaserActsKalmanFilter/python/CKF2Config.py index ea2dc0cb9eb08c309bf0453216230fe0b2074b4f..07861f8ba27734b240917d219dd20d6c98ab8785 100644 --- a/Tracking/Acts/FaserActsKalmanFilter/python/CKF2Config.py +++ b/Tracking/Acts/FaserActsKalmanFilter/python/CKF2Config.py @@ -59,35 +59,28 @@ def CKF2Cfg(flags, **kwargs): track_seed_tool.TrackCollection = "Segments" trajectory_states_writer_tool = CompFactory.RootTrajectoryStatesWriterTool() - trajectory_states_writer_tool.noDiagnostics = kwargs["noDiagnostics"] + trajectory_states_writer_tool.noDiagnostics = kwargs.pop("noDiagnostics", True) trajectory_states_writer_tool1 = CompFactory.RootTrajectoryStatesWriterTool() - trajectory_states_writer_tool1.noDiagnostics = kwargs["noDiagnostics"] + trajectory_states_writer_tool1.noDiagnostics = kwargs.pop("noDiagnostics", True) trajectory_states_writer_tool1.FilePath = "track_states_ckf1.root" - trajectory_states_writer_tool2 = CompFactory.RootTrajectoryStatesWriterTool() - trajectory_states_writer_tool2.FilePath = "track_states_ckf2.root" - 
trajectory_states_writer_tool2.noDiagnostics = kwargs["noDiagnostics"] - trajectory_summary_writer_tool = CompFactory.RootTrajectorySummaryWriterTool(**kwargs) - trajectory_summary_writer_tool.noDiagnostics = kwargs["noDiagnostics"] + trajectory_summary_writer_tool = CompFactory.RootTrajectorySummaryWriterTool() + trajectory_summary_writer_tool.noDiagnostics = kwargs.pop("noDiagnostics", True) trajectory_summary_writer_tool1 = CompFactory.RootTrajectorySummaryWriterTool() trajectory_summary_writer_tool1.FilePath = "track_summary_ckf1.root" - trajectory_summary_writer_tool1.noDiagnostics = kwargs["noDiagnostics"] - trajectory_summary_writer_tool2 = CompFactory.RootTrajectorySummaryWriterTool(**kwargs) - trajectory_summary_writer_tool2.FilePath = "track_summary_ckf2.root" - trajectory_summary_writer_tool2.noDiagnostics = kwargs["noDiagnostics"] + trajectory_summary_writer_tool1.noDiagnostics = kwargs.pop("noDiagnostics", True) actsExtrapolationTool = CompFactory.FaserActsExtrapolationTool("FaserActsExtrapolationTool") actsExtrapolationTool.MaxSteps = 1000 actsExtrapolationTool.TrackingGeometryTool = CompFactory.FaserActsTrackingGeometryTool("TrackingGeometryTool") - trajectory_performance_writer_tool = CompFactory.PerformanceWriterTool("PerformanceWriterTool", **kwargs) + trajectory_performance_writer_tool = CompFactory.PerformanceWriterTool("PerformanceWriterTool") trajectory_performance_writer_tool.ExtrapolationTool = actsExtrapolationTool - trajectory_performance_writer_tool.noDiagnostics = kwargs["noDiagnostics"] - + trajectory_performance_writer_tool.noDiagnostics = kwargs.pop("noDiagnostics", True) ckf = CompFactory.CKF2(**kwargs) - kalman_fitter1 = CompFactory.KalmanFitterTool(name="fitterTool1", **kwargs) - kalman_fitter1.noDiagnostics = kwargs["noDiagnostics"] + kalman_fitter1 = CompFactory.KalmanFitterTool(name="fitterTool1") + kalman_fitter1.noDiagnostics = kwargs.pop("noDiagnostics", True) kalman_fitter1.ActsLogging = "INFO" kalman_fitter1.SummaryWriter = True kalman_fitter1.StatesWriter = False @@ -97,17 +90,6 @@ def CKF2Cfg(flags, **kwargs): kalman_fitter1.RootTrajectorySummaryWriterTool = trajectory_summary_writer_tool1 ckf.KalmanFitterTool1 = kalman_fitter1 - kalman_fitter2 = CompFactory.KalmanFitterTool(name="fitterTool2", **kwargs) - kalman_fitter2.noDiagnostics = kwargs["noDiagnostics"] - kalman_fitter2.ActsLogging = "INFO" - kalman_fitter2.SummaryWriter = True - kalman_fitter2.StatesWriter = False - kalman_fitter2.SeedCovarianceScale = 10 - kalman_fitter2.isMC = flags.Input.isMC - kalman_fitter2.RootTrajectoryStatesWriterTool = trajectory_states_writer_tool2 - kalman_fitter2.RootTrajectorySummaryWriterTool = trajectory_summary_writer_tool2 - ckf.KalmanFitterTool2 = kalman_fitter2 - ckf.TrackSeed = track_seed_tool ckf.ActsLogging = "INFO" ckf.RootTrajectoryStatesWriterTool = trajectory_states_writer_tool diff --git a/Tracking/Acts/FaserActsKalmanFilter/python/TI12CKF2Config.py b/Tracking/Acts/FaserActsKalmanFilter/python/TI12CKF2Config.py index 6e5f0aa6109ad02ae0ac575f476f224d85637c4c..87101612bd15a3c33c670ef3f0d6ccd5c78f4a5c 100644 --- a/Tracking/Acts/FaserActsKalmanFilter/python/TI12CKF2Config.py +++ b/Tracking/Acts/FaserActsKalmanFilter/python/TI12CKF2Config.py @@ -61,18 +61,12 @@ def TI12CKF2Cfg(flags, **kwargs): trajectory_states_writer_tool1 = CompFactory.RootTrajectoryStatesWriterTool() trajectory_states_writer_tool1.noDiagnostics = kwargs["noDiagnostics"] trajectory_states_writer_tool1.FilePath = "track_states_ckf1.root" - trajectory_states_writer_tool2 = 
CompFactory.RootTrajectoryStatesWriterTool() - trajectory_states_writer_tool2.FilePath = "track_states_ckf2.root" - trajectory_states_writer_tool2.noDiagnostics = kwargs["noDiagnostics"] - trajectory_summary_writer_tool = CompFactory.RootTrajectorySummaryWriterTool(**kwargs) + trajectory_summary_writer_tool = CompFactory.RootTrajectorySummaryWriterTool() trajectory_summary_writer_tool.noDiagnostics = kwargs["noDiagnostics"] trajectory_summary_writer_tool1 = CompFactory.RootTrajectorySummaryWriterTool() trajectory_summary_writer_tool1.FilePath = "track_summary_ckf1.root" trajectory_summary_writer_tool1.noDiagnostics = kwargs["noDiagnostics"] - trajectory_summary_writer_tool2 = CompFactory.RootTrajectorySummaryWriterTool(**kwargs) - trajectory_summary_writer_tool2.FilePath = "track_summary_ckf2.root" - trajectory_summary_writer_tool2.noDiagnostics = kwargs["noDiagnostics"] actsExtrapolationTool = CompFactory.FaserActsExtrapolationTool("FaserActsExtrapolationTool") actsExtrapolationTool.MaxSteps = 1000 @@ -84,7 +78,7 @@ def TI12CKF2Cfg(flags, **kwargs): ckf = CompFactory.CKF2(**kwargs) - kalman_fitter1 = CompFactory.KalmanFitterTool(name="fitterTool1", **kwargs) + kalman_fitter1 = CompFactory.KalmanFitterTool(name="fitterTool1") kalman_fitter1.noDiagnostics = kwargs["noDiagnostics"] kalman_fitter1.ActsLogging = "INFO" kalman_fitter1.SummaryWriter = True diff --git a/Tracking/Acts/FaserActsKalmanFilter/src/ActsTrackSeedTool.cxx b/Tracking/Acts/FaserActsKalmanFilter/src/ActsTrackSeedTool.cxx index b311727d083ce5855ee6d291354e995d6d39f7a1..4161c19eecbc6b30b0436732f76498f71f3f4fa6 100644 --- a/Tracking/Acts/FaserActsKalmanFilter/src/ActsTrackSeedTool.cxx +++ b/Tracking/Acts/FaserActsKalmanFilter/src/ActsTrackSeedTool.cxx @@ -27,7 +27,7 @@ StatusCode ActsTrackSeedTool::initialize() { } -StatusCode ActsTrackSeedTool::run() { +StatusCode ActsTrackSeedTool::run(std::vector<int> /*maskedLayers*/) { SG::ReadHandle<TrackCollection> trackCollection {m_trackCollection}; ATH_CHECK(trackCollection.isValid()); diff --git a/Tracking/Acts/FaserActsKalmanFilter/src/ActsTrackSeedTool.h b/Tracking/Acts/FaserActsKalmanFilter/src/ActsTrackSeedTool.h index 95311206b6300722344935b535c4f0d963728392..41aebd49ff0c7caf5f5615a47af1641b804b45fb 100644 --- a/Tracking/Acts/FaserActsKalmanFilter/src/ActsTrackSeedTool.h +++ b/Tracking/Acts/FaserActsKalmanFilter/src/ActsTrackSeedTool.h @@ -18,7 +18,7 @@ public: virtual ~ActsTrackSeedTool() = default; virtual StatusCode initialize() override; virtual StatusCode finalize() override; - virtual StatusCode run() override; + virtual StatusCode run(std::vector<int> /*maskedLayers*/) override; const std::shared_ptr<std::vector<Acts::CurvilinearTrackParameters>> initialTrackParameters() const override; const std::shared_ptr<const Acts::Surface> initialSurface() const override; diff --git a/Tracking/Acts/FaserActsKalmanFilter/src/CKF2.cxx b/Tracking/Acts/FaserActsKalmanFilter/src/CKF2.cxx index d0d6f2138289fa0c5e387ae2897e0abcbd7dca8a..858b97d6b2655859d77f027b31a57ce5782b95ae 100644 --- a/Tracking/Acts/FaserActsKalmanFilter/src/CKF2.cxx +++ b/Tracking/Acts/FaserActsKalmanFilter/src/CKF2.cxx @@ -38,9 +38,8 @@ using TrajectoriesContainer = std::vector<FaserActsRecMultiTrajectory>; //std::array<Acts::BoundIndices, 2> indices = {Acts::eBoundLoc0, Acts::eBoundLoc1}; -CKF2::CKF2( - const std::string& name, ISvcLocator* pSvcLocator) - : AthAlgorithm(name, pSvcLocator) {} +CKF2::CKF2(const std::string& name, ISvcLocator* pSvcLocator) : + AthAlgorithm(name, pSvcLocator) {} StatusCode 
CKF2::initialize() { @@ -48,8 +47,8 @@ StatusCode CKF2::initialize() { ATH_CHECK(m_trackingGeometryTool.retrieve()); ATH_CHECK(m_trackSeedTool.retrieve()); ATH_CHECK(m_kalmanFitterTool1.retrieve()); - ATH_CHECK(m_kalmanFitterTool2.retrieve()); - // ATH_CHECK(m_trackCollection.initialize()); + ATH_CHECK(m_createTrkTrackTool.retrieve()); + ATH_CHECK(m_trackCollection.initialize()); if (m_performanceWriter && !m_noDiagnostics) { ATH_CHECK(m_performanceWriterTool.retrieve()); } @@ -79,8 +78,7 @@ StatusCode CKF2::execute() { const EventContext& ctx = Gaudi::Hive::currentContext(); m_numberOfEvents++; - ATH_CHECK(m_trackCollection.initialize()); - SG::WriteHandle<TrackCollection> trackContainer{m_trackCollection,ctx}; + SG::WriteHandle trackContainer{m_trackCollection, ctx}; std::unique_ptr<TrackCollection> outputTracks = std::make_unique<TrackCollection>(); std::shared_ptr<const Acts::TrackingGeometry> trackingGeometry @@ -91,7 +89,7 @@ StatusCode CKF2::execute() { Acts::MagneticFieldContext magFieldContext = getMagneticFieldContext(ctx); Acts::CalibrationContext calibContext; - CHECK(m_trackSeedTool->run()); + CHECK(m_trackSeedTool->run(m_maskedLayers)); std::shared_ptr<const Acts::Surface> initialSurface = m_trackSeedTool->initialSurface(); std::shared_ptr<std::vector<Acts::CurvilinearTrackParameters>> initialParameters = @@ -189,18 +187,12 @@ StatusCode CKF2::execute() { ATH_MSG_DEBUG(" position: " << params.position(gctx).transpose()); ATH_MSG_DEBUG(" momentum: " << params.momentum().transpose()); ATH_MSG_DEBUG(" charge: " << params.charge()); - std::unique_ptr<Trk::Track> track = makeTrack(gctx, traj); - if (track) { + std::unique_ptr<Trk::Track> track = m_createTrkTrackTool->createTrack(gctx, traj); + if (track != nullptr) { m_numberOfSelectedTracks++; - std::unique_ptr<Trk::Track> track2 = m_kalmanFitterTool1->fit(ctx, gctx, *track, trajectories, Acts::BoundVector::Zero(), m_isMC, origin); + std::unique_ptr<Trk::Track> track2 = m_kalmanFitterTool1->fit(ctx, gctx, track.get(), Acts::BoundVector::Zero(), m_isMC, origin); if (track2) { - std::unique_ptr<Trk::Track> track3 = m_kalmanFitterTool2->fit(ctx, gctx, *track2, trajectories, Acts::BoundVector::Zero(), m_isMC, origin); - if (track3) { - outputTracks->push_back(std::move(track3)); - } else { - outputTracks->push_back(std::move(track2)); - ATH_MSG_WARNING("Re-Fit failed."); - } + outputTracks->push_back(std::move(track2)); } else { outputTracks->push_back(std::move(track)); ATH_MSG_WARNING("Re-Fit failed."); @@ -248,191 +240,6 @@ Acts::MagneticFieldContext CKF2::getMagneticFieldContext(const EventContext& ctx } -std::unique_ptr<Trk::Track> -CKF2::makeTrack(const Acts::GeometryContext &geoCtx, const FaserActsRecMultiTrajectory &traj) const { - using ConstTrackStateProxy = - Acts::detail_lt::TrackStateProxy<IndexSourceLink, 6, true>; - std::unique_ptr<Trk::Track> newtrack = nullptr; - //Get the fit output object - DataVector<const Trk::TrackStateOnSurface>* finalTrajectory = new DataVector<const Trk::TrackStateOnSurface>{}; - std::vector<std::unique_ptr<const Acts::BoundTrackParameters>> actsSmoothedParam; - // Loop over all the output state to create track state - traj.multiTrajectory().visitBackwards(traj.tips().front(), [&](const ConstTrackStateProxy& state) { - auto flag = state.typeFlags(); - if (state.referenceSurface().associatedDetectorElement() != nullptr) { - // We need to determine the type of state - std::bitset<Trk::TrackStateOnSurface::NumberOfTrackStateOnSurfaceTypes> typePattern; - const Trk::TrackParameters *parm; - - // 
State is a hole (no associated measurement), use predicted para meters - if (flag[Acts::TrackStateFlag::HoleFlag] == true) { - const Acts::BoundTrackParameters actsParam(state.referenceSurface().getSharedPtr(), - state.predicted(), - state.predictedCovariance()); - parm = ConvertActsTrackParameterToATLAS(actsParam, geoCtx); - // auto boundaryCheck = m_boundaryCheckTool->boundaryCheck(*p arm); - typePattern.set(Trk::TrackStateOnSurface::Hole); - } - // The state was tagged as an outlier, use filtered parameters - else if (flag[Acts::TrackStateFlag::OutlierFlag] == true) { - const Acts::BoundTrackParameters actsParam(state.referenceSurface().getSharedPtr(), - state.filtered(), state.filteredCovariance()); - parm = ConvertActsTrackParameterToATLAS(actsParam, geoCtx); - typePattern.set(Trk::TrackStateOnSurface::Outlier); - } - // The state is a measurement state, use smoothed parameters - else { - const Acts::BoundTrackParameters actsParam(state.referenceSurface().getSharedPtr(), - state.smoothed(), state.smoothedCovariance()); - actsSmoothedParam.push_back(std::make_unique<const Acts::BoundTrackParameters>(Acts::BoundTrackParameters(actsParam))); - // const auto& psurface=actsParam.referenceSurface(); - Acts::Vector2 local(actsParam.parameters()[Acts::eBoundLoc0], actsParam.parameters()[Acts::eBoundLoc1]); - // const Acts::Vector3 dir = Acts::makeDirectionUnitFromPhiTheta(actsParam.parameters()[Acts::eBoundPhi], actsParam.parameters()[Acts::eBoundTheta]); - // auto pos=actsParam.position(tgContext); - parm = ConvertActsTrackParameterToATLAS(actsParam, geoCtx); - typePattern.set(Trk::TrackStateOnSurface::Measurement); - } - Tracker::FaserSCT_ClusterOnTrack* measState = nullptr; - if (state.hasUncalibrated()) { - const Tracker::FaserSCT_Cluster* fitCluster = state.uncalibrated().hit(); - if (fitCluster->detectorElement() != nullptr) { - measState = new Tracker::FaserSCT_ClusterOnTrack{ - fitCluster, - Trk::LocalParameters{ - Trk::DefinedParameter{fitCluster->localPosition()[0], Trk::loc1}, - Trk::DefinedParameter{fitCluster->localPosition()[1], Trk::loc2} - }, - fitCluster->localCovariance(), - m_idHelper->wafer_hash(fitCluster->detectorElement()->identify()) - }; - } - } - double nDoF = state.calibratedSize(); - const Trk::FitQualityOnSurface *quality = new Trk::FitQualityOnSurface(state.chi2(), nDoF); - const Trk::TrackStateOnSurface *perState = new Trk::TrackStateOnSurface(measState, parm, quality, nullptr, typePattern); - // If a state was succesfully created add it to the trajectory - if (perState) { - finalTrajectory->insert(finalTrajectory->begin(), perState); - } - } - return; - }); - - // Create the track using the states - const Trk::TrackInfo newInfo(Trk::TrackInfo::TrackFitter::KalmanFitter, Trk::ParticleHypothesis::muon); - // Trk::FitQuality* q = nullptr; - // newInfo.setTrackFitter(Trk::TrackInfo::TrackFitter::KalmanFitter ); //Mark the fitter as KalmanFitter - newtrack = std::make_unique<Trk::Track>(newInfo, std::move(*finalTrajectory), nullptr); - return newtrack; -} - - -std::unique_ptr<Trk::Track> -CKF2::makeTrack(Acts::GeometryContext& geoCtx, TrackFitterResult& fitResult) const { - using ConstTrackStateProxy = - Acts::detail_lt::TrackStateProxy<IndexSourceLink, 6, true>; - std::unique_ptr<Trk::Track> newtrack = nullptr; - //Get the fit output object - const auto& fitOutput = fitResult.value(); - if (fitOutput.fittedParameters.size() > 0) { - DataVector<const Trk::TrackStateOnSurface>* finalTrajectory = new DataVector<const Trk::TrackStateOnSurface>{}; - 
std::vector<std::unique_ptr<const Acts::BoundTrackParameters>> actsSmoothedParam; - // Loop over all the output state to create track state - fitOutput.fittedStates.visitBackwards(fitOutput.lastMeasurementIndices.front(), [&](const ConstTrackStateProxy& state) { - auto flag = state.typeFlags(); - if (state.referenceSurface().associatedDetectorElement() != nullptr) { - // We need to determine the type of state - std::bitset<Trk::TrackStateOnSurface::NumberOfTrackStateOnSurfaceTypes> typePattern; - const Trk::TrackParameters *parm; - - // State is a hole (no associated measurement), use predicted para meters - if (flag[Acts::TrackStateFlag::HoleFlag] == true) { - const Acts::BoundTrackParameters actsParam(state.referenceSurface().getSharedPtr(), - state.predicted(), - state.predictedCovariance()); - parm = ConvertActsTrackParameterToATLAS(actsParam, geoCtx); - // auto boundaryCheck = m_boundaryCheckTool->boundaryCheck(*p arm); - typePattern.set(Trk::TrackStateOnSurface::Hole); - } - // The state was tagged as an outlier, use filtered parameters - else if (flag[Acts::TrackStateFlag::OutlierFlag] == true) { - const Acts::BoundTrackParameters actsParam(state.referenceSurface().getSharedPtr(), - state.filtered(), state.filteredCovariance()); - parm = ConvertActsTrackParameterToATLAS(actsParam, geoCtx); - typePattern.set(Trk::TrackStateOnSurface::Outlier); - } - // The state is a measurement state, use smoothed parameters - else { - const Acts::BoundTrackParameters actsParam(state.referenceSurface().getSharedPtr(), - state.smoothed(), state.smoothedCovariance()); - actsSmoothedParam.push_back(std::make_unique<const Acts::BoundTrackParameters>(Acts::BoundTrackParameters(actsParam))); - // const auto& psurface=actsParam.referenceSurface(); - Acts::Vector2 local(actsParam.parameters()[Acts::eBoundLoc0], actsParam.parameters()[Acts::eBoundLoc1]); - // const Acts::Vector3 dir = Acts::makeDirectionUnitFromPhiTheta(actsParam.parameters()[Acts::eBoundPhi], actsParam.parameters()[Acts::eBoundTheta]); - // auto pos=actsParam.position(tgContext); - parm = ConvertActsTrackParameterToATLAS(actsParam, geoCtx); - typePattern.set(Trk::TrackStateOnSurface::Measurement); - } - Tracker::FaserSCT_ClusterOnTrack* measState = nullptr; - if (state.hasUncalibrated()) { - const Tracker::FaserSCT_Cluster* fitCluster = state.uncalibrated().hit(); - if (fitCluster->detectorElement() != nullptr) { - measState = new Tracker::FaserSCT_ClusterOnTrack{ - fitCluster, - Trk::LocalParameters{ - Trk::DefinedParameter{fitCluster->localPosition()[0], Trk::loc1}, - Trk::DefinedParameter{fitCluster->localPosition()[1], Trk::loc2} - }, - fitCluster->localCovariance(), - m_idHelper->wafer_hash(fitCluster->detectorElement()->identify()) - }; - } - } - double nDoF = state.calibratedSize(); - const Trk::FitQualityOnSurface *quality = new Trk::FitQualityOnSurface(state.chi2(), nDoF); - const Trk::TrackStateOnSurface *perState = new Trk::TrackStateOnSurface(measState, parm, quality, nullptr, typePattern); - // If a state was succesfully created add it to the trajectory - if (perState) { - finalTrajectory->insert(finalTrajectory->begin(), perState); - } - } - return; - }); - - // Create the track using the states - const Trk::TrackInfo newInfo(Trk::TrackInfo::TrackFitter::KalmanFitter, Trk::ParticleHypothesis::muon); - // Trk::FitQuality* q = nullptr; - // newInfo.setTrackFitter(Trk::TrackInfo::TrackFitter::KalmanFitter ); //Mark the fitter as KalmanFitter - newtrack = std::make_unique<Trk::Track>(newInfo, std::move(*finalTrajectory), nullptr); 
- } - return newtrack; -} - -const Trk::TrackParameters* -CKF2::ConvertActsTrackParameterToATLAS(const Acts::BoundTrackParameters &actsParameter, const Acts::GeometryContext& gctx) const { - using namespace Acts::UnitLiterals; - std::optional<AmgSymMatrix(5)> cov = std::nullopt; - if (actsParameter.covariance()){ - AmgSymMatrix(5) newcov(actsParameter.covariance()->topLeftCorner(5, 5)); - // Convert the covariance matrix to GeV - for(int i=0; i < newcov.rows(); i++){ - newcov(i, 4) = newcov(i, 4)*1_MeV; - } - for(int i=0; i < newcov.cols(); i++){ - newcov(4, i) = newcov(4, i)*1_MeV; - } - cov = std::optional<AmgSymMatrix(5)>(newcov); - } - const Amg::Vector3D& pos=actsParameter.position(gctx); - double tphi=actsParameter.get<Acts::eBoundPhi>(); - double ttheta=actsParameter.get<Acts::eBoundTheta>(); - double tqOverP=actsParameter.get<Acts::eBoundQOverP>()*1_MeV; - double p = std::abs(1. / tqOverP); - Amg::Vector3D tmom(p * std::cos(tphi) * std::sin(ttheta), p * std::sin(tphi) * std::sin(ttheta), p * std::cos(ttheta)); - double charge = tqOverP > 0. ? 1. : -1.; - const Trk::CurvilinearParameters * curv = new Trk::CurvilinearParameters(pos,tmom,charge, cov); - return curv; -} void CKF2::computeSharedHits(std::vector<IndexSourceLink>* sourceLinks, TrackFinderResult& results) const { // Compute shared hits from all the reconstructed tracks diff --git a/Tracking/Acts/FaserActsKalmanFilter/src/CKF2.h b/Tracking/Acts/FaserActsKalmanFilter/src/CKF2.h index c6b42b020766fa0e17b5693a2a0bc266057c6243..2111afcf6b52d46b45e1e2539d105290b937689e 100644 --- a/Tracking/Acts/FaserActsKalmanFilter/src/CKF2.h +++ b/Tracking/Acts/FaserActsKalmanFilter/src/CKF2.h @@ -20,6 +20,7 @@ #include "PerformanceWriterTool.h" #include "KalmanFitterTool.h" #include <boost/dynamic_bitset.hpp> +#include "CreateTrkTrackTool.h" using ConstTrackStateProxy = Acts::detail_lt::TrackStateProxy<IndexSourceLink, 6, true>; using ClusterSet = boost::dynamic_bitset<>; @@ -136,6 +137,7 @@ private: Gaudi::Property<double> m_maxSteps {this, "maxSteps", 10000}; Gaudi::Property<double> m_chi2Max {this, "chi2Max", 15}; Gaudi::Property<unsigned long> m_nMax {this, "nMax", 10}; + Gaudi::Property<std::vector<int>> m_maskedLayers {this, "maskedLayers", {}}; SG::ReadCondHandleKey<FaserFieldCacheCondObj> m_fieldCondObjInputKey {this, "FaserFieldCacheCondObj", "fieldCondObj", "Name of the Magnetic Field conditions object key"}; ToolHandle<ITrackSeedTool> m_trackSeedTool {this, "TrackSeed", "ClusterTrackSeedTool"}; ToolHandle<IFaserActsTrackingGeometryTool> m_trackingGeometryTool {this, "TrackingGeometryTool", "FaserActsTrackingGeometryTool"}; @@ -143,14 +145,9 @@ private: ToolHandle<RootTrajectoryStatesWriterTool> m_trajectoryStatesWriterTool {this, "RootTrajectoryStatesWriterTool", "RootTrajectoryStatesWriterTool"}; ToolHandle<RootTrajectorySummaryWriterTool> m_trajectorySummaryWriterTool {this, "RootTrajectorySummaryWriterTool", "RootTrajectorySummaryWriterTool"}; ToolHandle<KalmanFitterTool> m_kalmanFitterTool1 {this, "KalmanFitterTool1", "KalmanFitterTool"}; - ToolHandle<KalmanFitterTool> m_kalmanFitterTool2 {this, "KalmanFitterTool2", "KalmanFitterTool"}; + ToolHandle<CreateTrkTrackTool> m_createTrkTrackTool {this, "CreateTrkTrackTool", "CreateTrkTrackTool"}; Gaudi::Property<bool> m_isMC {this, "isMC", false}; - - std::unique_ptr<Trk::Track> makeTrack(Acts::GeometryContext& tgContext, TrackFitterResult& fitResult) const; - std::unique_ptr<Trk::Track> makeTrack(const Acts::GeometryContext &geoCtx, const FaserActsRecMultiTrajectory &traj) const; - 
const Trk::TrackParameters* ConvertActsTrackParameterToATLAS(const Acts::BoundTrackParameters &actsParameter, const Acts::GeometryContext& gctx) const; - SG::WriteHandleKey<TrackCollection> m_trackCollection { this, "CKFTrackCollection", "CKFTrackCollection" }; + SG::WriteHandleKey<TrackCollection> m_trackCollection { this, "OutputCollection", "CKFTrackCollection", "Output track collection name" }; }; #endif // FASERACTSKALMANFILTER_CKF2_H - diff --git a/Tracking/Acts/FaserActsKalmanFilter/src/CircleFitTrackSeedTool.cxx b/Tracking/Acts/FaserActsKalmanFilter/src/CircleFitTrackSeedTool.cxx index e7c4f064c0b16408d8a068757be235f12adffaca..066698a478fbea03a6565a056afafd253e449a0d 100644 --- a/Tracking/Acts/FaserActsKalmanFilter/src/CircleFitTrackSeedTool.cxx +++ b/Tracking/Acts/FaserActsKalmanFilter/src/CircleFitTrackSeedTool.cxx @@ -34,7 +34,7 @@ StatusCode CircleFitTrackSeedTool::initialize() { } -StatusCode CircleFitTrackSeedTool::run() { +StatusCode CircleFitTrackSeedTool::run(std::vector<int> maskedLayers) { SG::ReadHandle<Tracker::FaserSCT_ClusterContainer> clusterContainer {m_clusterContainerKey}; ATH_CHECK(clusterContainer.isValid()); @@ -76,6 +76,9 @@ StatusCode CircleFitTrackSeedTool::run() { for (const Tracker::FaserSCT_Cluster* cluster : *clusterCollection) { clusters.push_back(cluster); Identifier id = cluster->detectorElement()->identify(); + if (std::find(maskedLayers.begin(), maskedLayers.end(), 3 * m_idHelper->station(id) + m_idHelper->layer(id)) != maskedLayers.end()) { + continue; + } CircleFitTrackSeedTool::s_indexMap[cluster->identify()] = measurements.size(); if (identifierMap->count(id) != 0) { Acts::GeometryIdentifier geoId = identifierMap->at(id); @@ -96,8 +99,8 @@ StatusCode CircleFitTrackSeedTool::run() { std::array<std::vector<Segment>, 4> segments {}; for (const Trk::Track* track : *trackCollection) { - auto s = Segment(track, m_idHelper); - segments[s.station].push_back(s); + auto s = Segment(track, m_idHelper, maskedLayers); + if (s.station != -1) segments[s.station].push_back(s); } std::vector<Segment> combination {}; @@ -162,32 +165,6 @@ StatusCode CircleFitTrackSeedTool::run() { Acts::Vector3 {0, 0, origin}, Acts::Vector3{0, 0, -1}); m_clusters = std::make_shared<std::vector<const Tracker::FaserSCT_Cluster*>>(clusters); m_spacePoints = std::make_shared<std::vector<const Tracker::FaserSCT_SpacePoint*>>(spacePoints); - - /* - std::cout.precision(17); - for (auto &seed : selectedSeeds) { - std::cout << "np.array(["; - for (const Acts::Vector3 &pos : seed.fakePositions) { - std::cout << "[" << pos.x() << ", " << pos.y() << ", " << pos.z() << "], "; - } - std::cout << "])" << std::endl; - std::cout << "chi2: " << seed.chi2 << ", momentum: " << seed.momentum << ", charge: " << seed.charge << std::endl; - std::cout << "fit = np.array([" << seed.c1 << ", " << seed.c0 << ", " << seed.cy << ", " << seed.cx << ", " << seed.r << "])" << std::endl; - - std::vector<ParticleHitCount> particleHitCounts; - identifyContributingParticles(*simData, seed.clusters, particleHitCounts); - auto ip = particles.find(particleHitCounts.front().particleId); - if (ip != particles.end()) { - const HepMC::GenParticle* particle = ip->second; - HepMC::FourVector momentum = particle->momentum(); - std::cout << "true momentum: " << momentum.rho() * 0.001 << std::endl; - } - for (const ParticleHitCount &hitCount : particleHitCounts) { - std::cout << hitCount.particleId << " : " << hitCount.hitCount << std::endl; - } - } - */ - s_indexMap.clear(); return StatusCode::SUCCESS; @@ -216,13 +193,16 @@ 
void CircleFitTrackSeedTool::go(const std::array<std::vector<Segment>, 4> &v, } } -CircleFitTrackSeedTool::Segment::Segment(const Trk::Track* track, const FaserSCT_ID *idHelper) : +CircleFitTrackSeedTool::Segment::Segment(const Trk::Track* track, const FaserSCT_ID *idHelper, const std::vector<int> &maskedLayers) : clusterSet(CircleFitTrackSeedTool::s_indexMap.size()) { for (const Trk::TrackStateOnSurface* trackState : *(track->trackStateOnSurfaces())) { auto clusterOnTrack = dynamic_cast<const Tracker::FaserSCT_ClusterOnTrack*> (trackState->measurementOnTrack()); if (clusterOnTrack) { const Tracker::FaserSCT_Cluster* cluster = clusterOnTrack->prepRawData(); Identifier id = cluster->identify(); + if (std::find(maskedLayers.begin(), maskedLayers.end(), 3 * idHelper->station(id) + idHelper->layer(id)) != maskedLayers.end()) { + continue; + } clusters.push_back(cluster); if (CircleFitTrackSeedTool::s_spacePointMap.count(id) > 0) { const Tracker::FaserSCT_SpacePoint *sp = CircleFitTrackSeedTool::s_spacePointMap.at(cluster->identify()); diff --git a/Tracking/Acts/FaserActsKalmanFilter/src/CircleFitTrackSeedTool.h b/Tracking/Acts/FaserActsKalmanFilter/src/CircleFitTrackSeedTool.h index 368abb1f228e45e69d0c38da02e63dc65c15fb4d..7866b89fc32fe2ccd8a0391f421b1d203ff1e8e0 100644 --- a/Tracking/Acts/FaserActsKalmanFilter/src/CircleFitTrackSeedTool.h +++ b/Tracking/Acts/FaserActsKalmanFilter/src/CircleFitTrackSeedTool.h @@ -30,7 +30,7 @@ public: virtual ~CircleFitTrackSeedTool() = default; virtual StatusCode initialize() override; virtual StatusCode finalize() override; - virtual StatusCode run() override; + virtual StatusCode run(std::vector<int> maskedLayers = {}) override; const std::shared_ptr<std::vector<Acts::CurvilinearTrackParameters>> initialTrackParameters() const override; const std::shared_ptr<const Acts::Surface> initialSurface() const override; @@ -45,8 +45,8 @@ public: private: struct Segment { public: - Segment(const Trk::Track* track, const FaserSCT_ID *idHelper); - int station; + Segment(const Trk::Track* track, const FaserSCT_ID *idHelper, const std::vector<int> &maskedLayers = {}); + int station = -1; std::vector<const Tracker::FaserSCT_Cluster*> clusters; std::vector<const Tracker::FaserSCT_SpacePoint*> spacePoints; ClusterSet clusterSet; diff --git a/Tracking/Acts/FaserActsKalmanFilter/src/CreateTrkTrackTool.cxx b/Tracking/Acts/FaserActsKalmanFilter/src/CreateTrkTrackTool.cxx new file mode 100644 index 0000000000000000000000000000000000000000..4928fc917e2b0bfe73e38848939690680fc3c19a --- /dev/null +++ b/Tracking/Acts/FaserActsKalmanFilter/src/CreateTrkTrackTool.cxx @@ -0,0 +1,106 @@ +#include "TrackerRIO_OnTrack/FaserSCT_ClusterOnTrack.h" +#include "CreateTrkTrackTool.h" + +#include <Acts/EventData/MultiTrajectoryHelpers.hpp> +#include "TrackerIdentifier/FaserSCT_ID.h" + +CreateTrkTrackTool::CreateTrkTrackTool(const std::string& type, const std::string& name, const IInterface* parent) : + AthAlgTool( type, name, parent ) {} + +StatusCode CreateTrkTrackTool::initialize() { + ATH_CHECK(detStore()->retrieve(m_idHelper,"FaserSCT_ID")); + return StatusCode::SUCCESS; +} + +StatusCode CreateTrkTrackTool::finalize() { + return StatusCode::SUCCESS; +} + +std::unique_ptr<Trk::Track> +CreateTrkTrackTool::createTrack(const Acts::GeometryContext &gctx, const FaserActsRecMultiTrajectory &traj) const { + std::unique_ptr<Trk::Track> newtrack = nullptr; + DataVector<const Trk::TrackStateOnSurface>* finalTrajectory = new DataVector<const Trk::TrackStateOnSurface>{}; + using ConstTrackStateProxy = 
Acts::detail_lt::TrackStateProxy<IndexSourceLink, 6, true>; + auto trajState = Acts::MultiTrajectoryHelpers::trajectoryState(traj.multiTrajectory(), traj.tips().front()); + traj.multiTrajectory().visitBackwards(traj.tips().front(), [&](const ConstTrackStateProxy& state) { + auto flag = state.typeFlags(); + if (state.referenceSurface().associatedDetectorElement() != nullptr) { + std::bitset<Trk::TrackStateOnSurface::NumberOfTrackStateOnSurfaceTypes> typePattern; + const Trk::TrackParameters *parm; + + if (flag[Acts::TrackStateFlag::HoleFlag]) { + const Acts::BoundTrackParameters actsParam(state.referenceSurface().getSharedPtr(), + state.predicted(), state.predictedCovariance()); + parm = ConvertActsTrackParameterToATLAS(actsParam, gctx); + typePattern.set(Trk::TrackStateOnSurface::Hole); + } + else if (flag[Acts::TrackStateFlag::OutlierFlag]) { + const Acts::BoundTrackParameters actsParam(state.referenceSurface().getSharedPtr(), + state.filtered(), state.filteredCovariance()); + parm = ConvertActsTrackParameterToATLAS(actsParam, gctx); + typePattern.set(Trk::TrackStateOnSurface::Outlier); + } + else { + const Acts::BoundTrackParameters actsParam(state.referenceSurface().getSharedPtr(), + state.smoothed(), state.smoothedCovariance()); + parm = ConvertActsTrackParameterToATLAS(actsParam, gctx); + typePattern.set(Trk::TrackStateOnSurface::Measurement); + } + Tracker::FaserSCT_ClusterOnTrack* clusterOnTrack = nullptr; + if (state.hasUncalibrated()) { + const Tracker::FaserSCT_Cluster* cluster = state.uncalibrated().hit(); + if (cluster->detectorElement() != nullptr) { + clusterOnTrack = new Tracker::FaserSCT_ClusterOnTrack{ + cluster, + Trk::LocalParameters{ + Trk::DefinedParameter{cluster->localPosition()[0], Trk::loc1}, + Trk::DefinedParameter{cluster->localPosition()[1], Trk::loc2} + }, + cluster->localCovariance(), + m_idHelper->wafer_hash(cluster->detectorElement()->identify()) + }; + } + } + const Trk::TrackStateOnSurface *perState = new Trk::TrackStateOnSurface(clusterOnTrack, parm); + if (perState) { + finalTrajectory->insert(finalTrajectory->begin(), perState); + } + } + return; + }); + + Trk::FitQuality* q = new Trk::FitQuality {trajState.chi2Sum, static_cast<double>(trajState.nMeasurements - 5)}; + Trk::TrackInfo newInfo(Trk::TrackInfo::TrackFitter::KalmanFitter, Trk::ParticleHypothesis::muon); + std::unique_ptr<DataVector<const Trk::TrackStateOnSurface>> sink(finalTrajectory); + // newtrack = std::make_unique<Trk::Track>(newInfo, std::move(*finalTrajectory), quality); + Trk::Track* tmpTrack = new Trk::Track(newInfo, std::move(*sink), q); + newtrack = std::unique_ptr<Trk::Track>(tmpTrack); + return newtrack; +} + + +const Trk::TrackParameters* +CreateTrkTrackTool::ConvertActsTrackParameterToATLAS(const Acts::BoundTrackParameters &actsParameter, const Acts::GeometryContext& gctx) const { + using namespace Acts::UnitLiterals; + std::optional<AmgSymMatrix(5)> cov = std::nullopt; + if (actsParameter.covariance()){ + AmgSymMatrix(5) newcov(actsParameter.covariance()->topLeftCorner(5, 5)); + // Convert the covariance matrix to GeV + for(int i=0; i < newcov.rows(); i++){ + newcov(i, 4) = newcov(i, 4)*1_MeV; + } + for(int i=0; i < newcov.cols(); i++){ + newcov(4, i) = newcov(4, i)*1_MeV; + } + cov = std::optional<AmgSymMatrix(5)>(newcov); + } + const Amg::Vector3D& pos=actsParameter.position(gctx); + double tphi=actsParameter.get<Acts::eBoundPhi>(); + double ttheta=actsParameter.get<Acts::eBoundTheta>(); + double tqOverP=actsParameter.get<Acts::eBoundQOverP>()*1_MeV; + double p = std::abs(1. 
/ tqOverP); + Amg::Vector3D tmom(p * std::cos(tphi) * std::sin(ttheta), p * std::sin(tphi) * std::sin(ttheta), p * std::cos(ttheta)); + double charge = tqOverP > 0. ? 1. : -1.; + const Trk::CurvilinearParameters * curv = new Trk::CurvilinearParameters(pos,tmom,charge, cov); + return curv; +} diff --git a/Tracking/Acts/FaserActsKalmanFilter/src/CreateTrkTrackTool.h b/Tracking/Acts/FaserActsKalmanFilter/src/CreateTrkTrackTool.h new file mode 100644 index 0000000000000000000000000000000000000000..d0edfd5cb9aedb78d7fde03104377f3ff9aa5d1f --- /dev/null +++ b/Tracking/Acts/FaserActsKalmanFilter/src/CreateTrkTrackTool.h @@ -0,0 +1,27 @@ +#ifndef FASERACTSKALMANFILTER_CREATETRKTRACKTOOL_H +#define FASERACTSKALMANFILTER_CREATETRKTRACKTOOL_H + +#include "TrackerPrepRawData/FaserSCT_ClusterContainer.h" +#include "AthenaBaseComps/AthAlgTool.h" +#include "TrkTrack/Track.h" +#include "TrkParameters/TrackParameters.h" +#include "Acts/Geometry/GeometryContext.hpp" +#include "Acts/EventData/TrackParameters.hpp" +#include "FaserActsRecMultiTrajectory.h" + + +class FaserSCT_ID; + +class CreateTrkTrackTool: public AthAlgTool { +public: + CreateTrkTrackTool(const std::string &type, const std::string &name, const IInterface *parent); + virtual ~CreateTrkTrackTool() = default; + virtual StatusCode initialize() override; + virtual StatusCode finalize() override; + std::unique_ptr<Trk::Track> createTrack(const Acts::GeometryContext &gctx, const FaserActsRecMultiTrajectory &traj) const; + const Trk::TrackParameters* ConvertActsTrackParameterToATLAS(const Acts::BoundTrackParameters &actsParameter, const Acts::GeometryContext& gctx) const; +private: + const FaserSCT_ID* m_idHelper {nullptr}; +}; + +#endif //FASERACTSKALMANFILTER_CREATETRKTRACKTOOL_H \ No newline at end of file diff --git a/Tracking/Acts/FaserActsKalmanFilter/src/KalmanFitterTool.cxx b/Tracking/Acts/FaserActsKalmanFilter/src/KalmanFitterTool.cxx index 6bb846781ad859824897eed70486cea903897a66..051687e21c6d0b1c1874a26ac4ddeb5d55c55124 100644 --- a/Tracking/Acts/FaserActsKalmanFilter/src/KalmanFitterTool.cxx +++ b/Tracking/Acts/FaserActsKalmanFilter/src/KalmanFitterTool.cxx @@ -19,6 +19,7 @@ KalmanFitterTool::KalmanFitterTool(const std::string& type, const std::string& n StatusCode KalmanFitterTool::initialize() { ATH_CHECK(m_fieldCondObjInputKey.initialize()); ATH_CHECK(m_trackingGeometryTool.retrieve()); + ATH_CHECK(m_createTrkTrackTool.retrieve()); ATH_CHECK(detStore()->retrieve(m_idHelper,"FaserSCT_ID")); if (m_statesWriter && !m_noDiagnostics) ATH_CHECK(m_trajectoryStatesWriterTool.retrieve()); if (m_summaryWriter && !m_noDiagnostics) ATH_CHECK(m_trajectorySummaryWriterTool.retrieve()); @@ -39,18 +40,18 @@ StatusCode KalmanFitterTool::finalize() { } std::unique_ptr<Trk::Track> -KalmanFitterTool::fit(const EventContext &ctx, const Acts::GeometryContext &gctx, const Trk::Track &inputTrack, - std::vector<FaserActsRecMultiTrajectory> & /*trajectories*/, - const Acts::BoundVector& inputVector, bool isMC, double origin) const { +KalmanFitterTool::fit(const EventContext &ctx, const Acts::GeometryContext &gctx, + Trk::Track* inputTrack, const Acts::BoundVector& inputVector, + bool isMC, double origin) const { std::unique_ptr<Trk::Track> newTrack = nullptr; std::vector<FaserActsRecMultiTrajectory> myTrajectories; - if (!inputTrack.measurementsOnTrack() || inputTrack.measurementsOnTrack()->size() < m_minMeasurements) { + if (!inputTrack->measurementsOnTrack() || inputTrack->measurementsOnTrack()->size() < m_minMeasurements) { ATH_MSG_DEBUG("Input track has no 
or too little measurements and cannot be fitted"); return nullptr; } - if (!inputTrack.trackParameters() || inputTrack.trackParameters()->empty()) { + if (!inputTrack->trackParameters() || inputTrack->trackParameters()->empty()) { ATH_MSG_DEBUG("Input track has no track parameters and cannot be fitted"); return nullptr; } @@ -63,7 +64,7 @@ KalmanFitterTool::fit(const EventContext &ctx, const Acts::GeometryContext &gctx Acts::CalibrationContext calibContext = Acts::CalibrationContext(); auto [sourceLinks, measurements] = getMeasurementsFromTrack(inputTrack); - auto trackParameters = getParametersFromTrack(inputTrack.trackParameters()->front(), inputVector, origin); + auto trackParameters = getParametersFromTrack(inputTrack->trackParameters()->front(), inputVector, origin); ATH_MSG_DEBUG("trackParameters: " << trackParameters.parameters().transpose()); ATH_MSG_DEBUG("position: " << trackParameters.position(gctx).transpose()); ATH_MSG_DEBUG("momentum: " << trackParameters.momentum().transpose()); @@ -103,7 +104,7 @@ KalmanFitterTool::fit(const EventContext &ctx, const Acts::GeometryContext &gctx } else { ATH_MSG_DEBUG("No fitted parameters for track"); } - newTrack = makeTrack(gctx, result); + newTrack = m_createTrkTrackTool->createTrack(gctx, myTrajectories.back()); } if (m_statesWriter && !m_noDiagnostics) { @@ -172,7 +173,7 @@ Acts::MagneticFieldContext KalmanFitterTool::getMagneticFieldContext(const Event std::tuple<std::vector<IndexSourceLink>, std::vector<Measurement>> -KalmanFitterTool::getMeasurementsFromTrack(const Trk::Track &track) const { +KalmanFitterTool::getMeasurementsFromTrack(Trk::Track *track) const { const int kSize = 1; std::array<Acts::BoundIndices, kSize> Indices = {Acts::eBoundLoc0}; using ThisMeasurement = Acts::Measurement<IndexSourceLink, Acts::BoundIndices, kSize>; @@ -181,7 +182,7 @@ KalmanFitterTool::getMeasurementsFromTrack(const Trk::Track &track) const { std::shared_ptr<IdentifierMap> identifierMap = m_trackingGeometryTool->getIdentifierMap(); std::vector<IndexSourceLink> sourceLinks; std::vector<Measurement> measurements; - for (const Trk::MeasurementBase *meas : *track.measurementsOnTrack()) { + for (const Trk::MeasurementBase *meas : *track->measurementsOnTrack()) { const auto* clusterOnTrack = dynamic_cast<const Tracker::FaserSCT_ClusterOnTrack*>(meas); const Tracker::FaserSCT_Cluster* cluster = clusterOnTrack->prepRawData(); if (clusterOnTrack) { @@ -241,113 +242,3 @@ KalmanFitterTool::getParametersFromTrack(const Trk::TrackParameters *inputParame return Acts::BoundTrackParameters(pSurface, params, inputParameters->charge(), cov); } - - -std::unique_ptr<Trk::Track> -KalmanFitterTool::makeTrack(const Acts::GeometryContext &geoCtx, TrackFitterResult& fitResult) const { - using ConstTrackStateProxy = - Acts::detail_lt::TrackStateProxy<IndexSourceLink, 6, true>; - std::unique_ptr<Trk::Track> newtrack = nullptr; - //Get the fit output object - const auto& fitOutput = fitResult.value(); - if (fitOutput.fittedParameters) { - DataVector<const Trk::TrackStateOnSurface>* finalTrajectory = new DataVector<const Trk::TrackStateOnSurface>{}; - std::vector<std::unique_ptr<const Acts::BoundTrackParameters>> actsSmoothedParam; - // Loop over all the output state to create track state - fitOutput.fittedStates.visitBackwards(fitOutput.lastMeasurementIndex, [&](const ConstTrackStateProxy& state) { - auto flag = state.typeFlags(); - if (state.referenceSurface().associatedDetectorElement() != nullptr) { - // We need to determine the type of state - 
std::bitset<Trk::TrackStateOnSurface::NumberOfTrackStateOnSurfaceTypes> typePattern; - const Trk::TrackParameters *parm; - - // State is a hole (no associated measurement), use predicted parameters - if (flag[Acts::TrackStateFlag::HoleFlag] == true) { - const Acts::BoundTrackParameters actsParam(state.referenceSurface().getSharedPtr(), - state.predicted(), - state.predictedCovariance()); - parm = ConvertActsTrackParameterToATLAS(actsParam, geoCtx); - // auto boundaryCheck = m_boundaryCheckTool->boundaryCheck(*parm); - typePattern.set(Trk::TrackStateOnSurface::Hole); - } - // The state was tagged as an outlier, use filtered parameters - else if (flag[Acts::TrackStateFlag::OutlierFlag] == true) { - const Acts::BoundTrackParameters actsParam(state.referenceSurface().getSharedPtr(), - state.filtered(), state.filteredCovariance()); - parm = ConvertActsTrackParameterToATLAS(actsParam, geoCtx); - typePattern.set(Trk::TrackStateOnSurface::Outlier); - } - // The state is a measurement state, use smoothed parameters - else { - const Acts::BoundTrackParameters actsParam(state.referenceSurface().getSharedPtr(), - state.smoothed(), state.smoothedCovariance()); - actsSmoothedParam.push_back(std::make_unique<const Acts::BoundTrackParameters>(Acts::BoundTrackParameters(actsParam))); - // const auto& psurface=actsParam.referenceSurface(); - // Acts::Vector2 local(actsParam.parameters()[Acts::eBoundLoc0], actsParam.parameters()[Acts::eBoundLoc1]); - // const Acts::Vector3 dir = Acts::makeDirectionUnitFromPhiTheta(actsParam.parameters()[Acts::eBoundPhi], actsParam.parameters()[Acts::eBoundTheta]); - // auto pos=actsParam.position(tgContext); - parm = ConvertActsTrackParameterToATLAS(actsParam, geoCtx); - typePattern.set(Trk::TrackStateOnSurface::Measurement); - } - Tracker::FaserSCT_ClusterOnTrack* measState = nullptr; - if (state.hasUncalibrated()) { - const Tracker::FaserSCT_Cluster* fitCluster = state.uncalibrated().hit(); - if (fitCluster->detectorElement() != nullptr) { - measState = new Tracker::FaserSCT_ClusterOnTrack{ - fitCluster, - Trk::LocalParameters{ - Trk::DefinedParameter{fitCluster->localPosition()[0], Trk::loc1}, - Trk::DefinedParameter{fitCluster->localPosition()[1], Trk::loc2} - }, - fitCluster->localCovariance(), - m_idHelper->wafer_hash(fitCluster->detectorElement()->identify()) - }; - } - } - double nDoF = state.calibratedSize(); - const Trk::FitQualityOnSurface *quality = new Trk::FitQualityOnSurface(state.chi2(), nDoF); - const Trk::TrackStateOnSurface *perState = new Trk::TrackStateOnSurface(measState, parm, quality, nullptr, typePattern); - // If a state was successfully created add it to the trajectory - if (perState) { - finalTrajectory->insert(finalTrajectory->begin(), perState); - } - } - return; - }); - - // Create the track using the states - const Trk::TrackInfo newInfo(Trk::TrackInfo::TrackFitter::KalmanFitter, Trk::ParticleHypothesis::muon); - // Trk::FitQuality* q = nullptr; - // newInfo.setTrackFitter(Trk::TrackInfo::TrackFitter::KalmanFitter ); //Mark the fitter as KalmanFitter - newtrack = std::make_unique<Trk::Track>(newInfo, std::move(*finalTrajectory), nullptr); - } - return newtrack; -} - - -const Trk::TrackParameters* -KalmanFitterTool::ConvertActsTrackParameterToATLAS(const Acts::BoundTrackParameters &actsParameter, const Acts::GeometryContext& gctx) const { - using namespace Acts::UnitLiterals; - std::optional<AmgSymMatrix(5)> cov = std::nullopt; - if (actsParameter.covariance()){ - AmgSymMatrix(5) newcov(actsParameter.covariance()->topLeftCorner(5, 5)); - // 
Convert the covariance matrix to GeV - for(int i=0; i < newcov.rows(); i++){ - newcov(i, 4) = newcov(i, 4)*1_MeV; - } - for(int i=0; i < newcov.cols(); i++){ - newcov(4, i) = newcov(4, i)*1_MeV; - } - cov = std::optional<AmgSymMatrix(5)>(newcov); - } - const Amg::Vector3D& pos=actsParameter.position(gctx); - double tphi=actsParameter.get<Acts::eBoundPhi>(); - double ttheta=actsParameter.get<Acts::eBoundTheta>(); - double tqOverP=actsParameter.get<Acts::eBoundQOverP>()*1_MeV; - double p = std::abs(1. / tqOverP); - Amg::Vector3D tmom(p * std::cos(tphi) * std::sin(ttheta), p * std::sin(tphi) * std::sin(ttheta), p * std::cos(ttheta)); - double charge = tqOverP > 0. ? 1. : -1.; - const Trk::CurvilinearParameters * curv = new Trk::CurvilinearParameters(pos,tmom,charge, cov); - return curv; -} - diff --git a/Tracking/Acts/FaserActsKalmanFilter/src/KalmanFitterTool.h b/Tracking/Acts/FaserActsKalmanFilter/src/KalmanFitterTool.h index 9ed5b212266e29b0d6155bed1580a2c0397e036c..0894e3ab2139decc1f0950a89f6200dc9ea2930f 100644 --- a/Tracking/Acts/FaserActsKalmanFilter/src/KalmanFitterTool.h +++ b/Tracking/Acts/FaserActsKalmanFilter/src/KalmanFitterTool.h @@ -13,6 +13,8 @@ #include "RootTrajectoryStatesWriterTool.h" #include "RootTrajectorySummaryWriterTool.h" #include "TrkTrack/Track.h" +#include "TrkTrack/TrackCollection.h" +#include "CreateTrkTrackTool.h" class FaserSCT_ID; @@ -38,16 +40,16 @@ public: }; static std::shared_ptr<TrackFitterFunction> makeTrackFitterFunction( std::shared_ptr<const Acts::TrackingGeometry> trackingGeometry); - virtual Acts::MagneticFieldContext getMagneticFieldContext(const EventContext& ctx) const; - std::unique_ptr<Trk::Track> fit(const EventContext &ctx, const Acts::GeometryContext &gctx, const Trk::Track &inputTrack, - std::vector<FaserActsRecMultiTrajectory> & /*trajectories*/, - const Acts::BoundVector& inputVector = Acts::BoundVector::Zero(), bool isMC=false, double origin=0) const; + std::unique_ptr<Trk::Track> fit(const EventContext &ctx, const Acts::GeometryContext &gctx, + Trk::Track *inputTrack, + const Acts::BoundVector& inputVector = Acts::BoundVector::Zero(), + bool isMC=false, double origin=0) const; private: const FaserSCT_ID* m_idHelper {nullptr}; std::tuple<std::vector<IndexSourceLink>, std::vector<Measurement>> - getMeasurementsFromTrack(const Trk::Track &track) const; + getMeasurementsFromTrack(Trk::Track *track) const; // Acts::BoundTrackParameters getParametersFromTrack(const Acts::BoundVector& params, const Trk::TrackParameters *inputParameters) const; Acts::BoundTrackParameters getParametersFromTrack(const Trk::TrackParameters *inputParameters, const Acts::BoundVector& inputVector, double origin) const; std::shared_ptr<TrackFitterFunction> m_fit; @@ -64,9 +66,7 @@ private: ToolHandle<IFaserActsTrackingGeometryTool> m_trackingGeometryTool {this, "TrackingGeometryTool", "FaserActsTrackingGeometryTool"}; ToolHandle<RootTrajectoryStatesWriterTool> m_trajectoryStatesWriterTool {this, "RootTrajectoryStatesWriterTool", "RootTrajectoryStatesWriterTool"}; ToolHandle<RootTrajectorySummaryWriterTool> m_trajectorySummaryWriterTool {this, "RootTrajectorySummaryWriterTool", "RootTrajectorySummaryWriterTool"}; - std::unique_ptr<Trk::Track> makeTrack(const Acts::GeometryContext &gctx, TrackFitterResult& fitResult) const; - const Trk::TrackParameters* ConvertActsTrackParameterToATLAS(const Acts::BoundTrackParameters &actsParameter, const Acts::GeometryContext& gctx) const; + ToolHandle<CreateTrkTrackTool> m_createTrkTrackTool {this, "CreateTrkTrackTool", 
"CreateTrkTrackTool"}; }; #endif //FASERACTSKALMANFILTER_KALMANFITTERTOOL_H - diff --git a/Tracking/Acts/FaserActsKalmanFilter/src/MyTrackSeedTool.cxx b/Tracking/Acts/FaserActsKalmanFilter/src/MyTrackSeedTool.cxx index 28e4c094c27c552c9ebaf1c7fc36d82e6d3fda4c..b5a5e04895ed443cb4e868c4ce722ac45fe65f2f 100644 --- a/Tracking/Acts/FaserActsKalmanFilter/src/MyTrackSeedTool.cxx +++ b/Tracking/Acts/FaserActsKalmanFilter/src/MyTrackSeedTool.cxx @@ -26,7 +26,7 @@ StatusCode MyTrackSeedTool::initialize() { } -StatusCode MyTrackSeedTool::run() { +StatusCode MyTrackSeedTool::run(std::vector<int> /*maskedLayers*/) { // create track seeds for multiple tracks SG::ReadHandle<TrackCollection> trackCollection {m_trackCollection}; diff --git a/Tracking/Acts/FaserActsKalmanFilter/src/MyTrackSeedTool.h b/Tracking/Acts/FaserActsKalmanFilter/src/MyTrackSeedTool.h index abc4e987434c536b37a20862bef87fc5fe8cbbac..df545c34abfd37e8b778a0c0be5b23852dbb7b7b 100644 --- a/Tracking/Acts/FaserActsKalmanFilter/src/MyTrackSeedTool.h +++ b/Tracking/Acts/FaserActsKalmanFilter/src/MyTrackSeedTool.h @@ -25,7 +25,7 @@ public: virtual ~MyTrackSeedTool() = default; virtual StatusCode initialize() override; virtual StatusCode finalize() override; - virtual StatusCode run() override; + virtual StatusCode run(std::vector<int> /*maskedLayers*/) override; const std::shared_ptr<std::vector<Acts::CurvilinearTrackParameters>> initialTrackParameters() const override; const std::shared_ptr<const Acts::Surface> initialSurface() const override; diff --git a/Tracking/Acts/FaserActsKalmanFilter/src/ThreeStationTrackSeedTool.cxx b/Tracking/Acts/FaserActsKalmanFilter/src/ThreeStationTrackSeedTool.cxx index be4918327055b7267255e1a54f890513c697ab14..15db300108f9090ba35c6b09ddc66a9b6cf7513a 100644 --- a/Tracking/Acts/FaserActsKalmanFilter/src/ThreeStationTrackSeedTool.cxx +++ b/Tracking/Acts/FaserActsKalmanFilter/src/ThreeStationTrackSeedTool.cxx @@ -23,7 +23,7 @@ StatusCode ThreeStationTrackSeedTool::initialize() { } -StatusCode ThreeStationTrackSeedTool::run() { +StatusCode ThreeStationTrackSeedTool::run(std::vector<int> /*maskedLayers*/) { // create track seeds for multiple tracks SG::ReadHandle<TrackCollection> trackCollection {m_trackCollection}; ATH_CHECK(trackCollection.isValid()); diff --git a/Tracking/Acts/FaserActsKalmanFilter/src/ThreeStationTrackSeedTool.h b/Tracking/Acts/FaserActsKalmanFilter/src/ThreeStationTrackSeedTool.h index 22cbf9af8420d5100635bbc0f6a00f760e111097..deab07bb12c5b061376975ba5d984f95b15da71e 100644 --- a/Tracking/Acts/FaserActsKalmanFilter/src/ThreeStationTrackSeedTool.h +++ b/Tracking/Acts/FaserActsKalmanFilter/src/ThreeStationTrackSeedTool.h @@ -23,7 +23,7 @@ public: virtual ~ThreeStationTrackSeedTool() = default; virtual StatusCode initialize() override; virtual StatusCode finalize() override; - virtual StatusCode run() override; + virtual StatusCode run(std::vector<int> /*maskedLayers*/) override; const std::shared_ptr<std::vector<Acts::CurvilinearTrackParameters>> initialTrackParameters() const override; const std::shared_ptr<const Acts::Surface> initialSurface() const override; diff --git a/Tracking/Acts/FaserActsKalmanFilter/src/components/FaserActsKalmanFilter_entries.cxx b/Tracking/Acts/FaserActsKalmanFilter/src/components/FaserActsKalmanFilter_entries.cxx index dae1d0d17b2e1d86a3b16f813dcd1a54d46735bd..967f80641f7846d303be4632c252ca30eaab6436 100755 --- a/Tracking/Acts/FaserActsKalmanFilter/src/components/FaserActsKalmanFilter_entries.cxx +++ 
b/Tracking/Acts/FaserActsKalmanFilter/src/components/FaserActsKalmanFilter_entries.cxx @@ -28,6 +28,7 @@ #include "../SeedingAlg.h" #include "../CircleFitTrackSeedTool.h" #include "../GhostBusters.h" +#include "../CreateTrkTrackTool.h" DECLARE_COMPONENT(FaserActsKalmanFilterAlg) DECLARE_COMPONENT(CombinatorialKalmanFilterAlg) @@ -55,3 +56,4 @@ DECLARE_COMPONENT(MyTrackSeedTool) DECLARE_COMPONENT(SeedingAlg) DECLARE_COMPONENT(CircleFitTrackSeedTool) DECLARE_COMPONENT(GhostBusters) +DECLARE_COMPONENT(CreateTrkTrackTool) diff --git a/Tracking/Acts/FaserActsKalmanFilter/test/CKF2.py b/Tracking/Acts/FaserActsKalmanFilter/test/CKF2.py index afcef75d6ce5d335da6504c21ddf44ae8fc507e2..27c50d9a8b397776ccf19e810ff981afa47a6b67 100644 --- a/Tracking/Acts/FaserActsKalmanFilter/test/CKF2.py +++ b/Tracking/Acts/FaserActsKalmanFilter/test/CKF2.py @@ -8,6 +8,7 @@ from CalypsoConfiguration.AllConfigFlags import ConfigFlags from CalypsoConfiguration.MainServicesConfig import MainServicesCfg from AthenaPoolCnvSvc.PoolReadConfig import PoolReadCfg from AthenaPoolCnvSvc.PoolWriteConfig import PoolWriteCfg +from FaserGeoModel.FaserGeoModelConfig import FaserGeometryCfg from TrackerPrepRawDataFormation.TrackerPrepRawDataFormationConfig import FaserSCT_ClusterizationCfg from TrackerSpacePointFormation.TrackerSpacePointFormationConfig import TrackerSpacePointFinderCfg from TrackerSegmentFit.TrackerSegmentFitConfig import SegmentFitAlgCfg @@ -30,11 +31,12 @@ ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-02" ConfigFlags.GeoModel.FaserVersion = "FASERNU-03" ConfigFlags.GeoModel.Align.Dynamic = False ConfigFlags.Beam.NumberOfCollisions = 0. -ConfigFlags.TrackingGeometry.MaterialSource = "Input" +ConfigFlags.TrackingGeometry.MaterialSource = "geometry-maps.json" ConfigFlags.Input.isMC = True ConfigFlags.lock() acc = MainServicesCfg(ConfigFlags) +acc.merge(FaserGeometryCfg(ConfigFlags)) acc.merge(PoolReadCfg(ConfigFlags)) acc.merge(PoolWriteCfg(ConfigFlags)) @@ -42,7 +44,7 @@ acc.merge(FaserSCT_ClusterizationCfg(ConfigFlags)) acc.merge(TrackerSpacePointFinderCfg(ConfigFlags)) acc.merge(SegmentFitAlgCfg(ConfigFlags, SharedHitFraction=0.61, MinClustersPerFit=5, TanThetaXZCut=0.083)) acc.merge(GhostBustersCfg(ConfigFlags)) -acc.merge(CKF2Cfg(ConfigFlags, noDiagnostics=True)) +acc.merge(CKF2Cfg(ConfigFlags)) # acc.getEventAlgo("CKF2").OutputLevel = DEBUG # logging.getLogger('forcomps').setLevel(VERBOSE) diff --git a/Tracking/Acts/FaserActsKalmanFilter/test/TI12CKF2.py b/Tracking/Acts/FaserActsKalmanFilter/test/TI12CKF2.py index 5c9deac421928f513657ce2c161e11b3dc15dd21..404df8b339d95fa6be277e62d755415fd8eb0353 100644 --- a/Tracking/Acts/FaserActsKalmanFilter/test/TI12CKF2.py +++ b/Tracking/Acts/FaserActsKalmanFilter/test/TI12CKF2.py @@ -8,6 +8,7 @@ from CalypsoConfiguration.AllConfigFlags import ConfigFlags from CalypsoConfiguration.MainServicesConfig import MainServicesCfg from AthenaPoolCnvSvc.PoolReadConfig import PoolReadCfg # from AthenaPoolCnvSvc.PoolWriteConfig import PoolWriteCfg +from FaserGeoModel.FaserGeoModelConfig import FaserGeometryCfg from AthenaPoolCnvSvc.PoolWriteConfig import PoolWriteCfg from FaserByteStreamCnvSvc.FaserByteStreamCnvSvcConfig import FaserByteStreamCnvSvcCfg from TrackerPrepRawDataFormation.TrackerPrepRawDataFormationConfig import FaserSCT_ClusterizationCfg diff --git a/Waveform/WaveRecAlgs/python/WaveRecAlgsConfig.py b/Waveform/WaveRecAlgs/python/WaveRecAlgsConfig.py index 414773bf9dc2e9b8140b33d81a80220c4288e4bf..b02a8b7d2b9b22193ff2a7d5e01efa36a89730cc 100644 --- 
a/Waveform/WaveRecAlgs/python/WaveRecAlgsConfig.py +++ b/Waveform/WaveRecAlgs/python/WaveRecAlgsConfig.py @@ -19,11 +19,13 @@ def WaveformReconstructionCfg(flags): if not flags.Input.isMC: acc.merge(WaveformClockRecCfg(flags, "ClockRecAlg")) - acc.merge(WaveformHitRecCfg(flags, "TriggerWaveformRecAlg", "Trigger")) + if "TB" not in flags.GeoModel.FaserVersion: + acc.merge(WaveformHitRecCfg(flags, "TriggerWaveformRecAlg", "Trigger")) + acc.merge(WaveformHitRecCfg(flags, "VetoNuWaveformRecAlg", "VetoNu")) + acc.merge(WaveformHitRecCfg(flags, "VetoWaveformRecAlg", "Veto")) acc.merge(WaveformHitRecCfg(flags, "PreshowerWaveformRecAlg", "Preshower")) acc.merge(WaveformHitRecCfg(flags, "CaloWaveformRecAlg", "Calo")) - acc.merge(WaveformHitRecCfg(flags, "VetoNuWaveformRecAlg", "VetoNu")) acc.merge(WaveformTimingCfg(flags)) diff --git a/Waveform/WaveformConditions/WaveCondUtils/scripts/makeTimingDB.py b/Waveform/WaveformConditions/WaveCondUtils/scripts/makeTimingDB.py index b3a1c64a23ad23ae70eaa000e3cbd76064bebfcd..57c3390c087b7f41850716d6ec24aa7d7e7044f6 100755 --- a/Waveform/WaveformConditions/WaveCondUtils/scripts/makeTimingDB.py +++ b/Waveform/WaveformConditions/WaveCondUtils/scripts/makeTimingDB.py @@ -5,7 +5,8 @@ # Can test results with # AtlCoolConsole.py "sqlite://;schema=waveform_reco.db;dbname=OFLP200" -filename = 'waveform_reco.db' +#filename = 'waveform_reco.db' +filename = 'ALLP200.db' # Nominal trigger time in ns nominal_data = { @@ -89,14 +90,21 @@ for run, data in offset_data.items(): from PyCool import cool +from CoolConvUtilities.AtlCoolLib import indirectOpen dbSvc = cool.DatabaseSvcFactory.databaseService() connectString = f'sqlite://;schema={filename};dbname=CONDBR3' -print('Creating database') +print('Opening database') -dbSvc.dropDatabase( connectString ) -db = dbSvc.createDatabase( connectString ) +# Try to open existing, but if not create new +try: + db = indirectOpen(connectString, readOnly=False) +except Exception as e: + print(e) + print("Couldn't open, try creating new") + #dbSvc.dropDatabase( connectString ) + db = dbSvc.createDatabase( connectString ) # Nominal trigger times nominalSpec = cool.RecordSpecification() @@ -176,8 +184,16 @@ for run, data in offset_data.items(): connectString = f'sqlite://;schema={filename};dbname=OFLP200' -dbSvc.dropDatabase( connectString ) -db = dbSvc.createDatabase( connectString ) +print('Opening database') + +# Try to open existing, but if not create new +try: + db = indirectOpen(connectString, readOnly=False) +except Exception as e: + print(e) + print("Couldn't open, try creating new") + #dbSvc.dropDatabase( connectString ) + db = dbSvc.createDatabase( connectString ) # Nominal trigger times nominalSpec = cool.RecordSpecification() diff --git a/Waveform/WaveformConditions/WaveformConditionsTools/python/WaveformTimingConfig.py b/Waveform/WaveformConditions/WaveformConditionsTools/python/WaveformTimingConfig.py index 3b95ed388208d7d4ab51a3d5621ab868d438c433..dec0c3858667fc6dee844f96f82afcf6d5427e47 100644 --- a/Waveform/WaveformConditions/WaveformConditionsTools/python/WaveformTimingConfig.py +++ b/Waveform/WaveformConditions/WaveformConditionsTools/python/WaveformTimingConfig.py @@ -21,12 +21,15 @@ def WaveformTimingCfg(flags, **kwargs): # tool = kwargs.get("WaveformTimingTool", WaveformTimingTool(flags)) # Probably need to figure this out! 
dbInstance = kwargs.get("dbInstance", "TRIGGER_OFL") - if flags.Input.isMC: - dbname = "OFLP200" - else: - dbname = "CONDBR3" + # The database instance is now configured globally, so it is not selected here + #if flags.Input.isMC: + # dbname = "OFLP200" + #else: + # dbname = "CONDBR3" - acc.merge(addFolders(flags, "/WAVE/DAQ/Timing", dbInstance, className="AthenaAttributeList", db=dbname)) - acc.merge(addFolders(flags, "/WAVE/DAQ/TimingOffset", dbInstance, className="CondAttrListCollection", db=dbname)) + #acc.merge(addFolders(flags, "/WAVE/DAQ/Timing", dbInstance, className="AthenaAttributeList", db=dbname)) + #acc.merge(addFolders(flags, "/WAVE/DAQ/TimingOffset", dbInstance, className="CondAttrListCollection", db=dbname)) + acc.merge(addFolders(flags, "/WAVE/DAQ/Timing", dbInstance, className="AthenaAttributeList")) + acc.merge(addFolders(flags, "/WAVE/DAQ/TimingOffset", dbInstance, className="CondAttrListCollection")) return acc
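
For reference, the last two files change how the waveform timing conditions are accessed: makeTimingDB.py now writes into ALLP200.db and appends to an existing file instead of always recreating it, while WaveformTimingConfig.py stops passing an explicit db= argument to addFolders because the instance (OFLP200 for MC, CONDBR3 for data) is resolved by the global conditions setup. Below is a minimal sketch of the open-or-create pattern, using only the calls that appear in the patch (indirectOpen from CoolConvUtilities.AtlCoolLib and the PyCool database service); the folder definitions and payload filling of the real script are omitted.

from PyCool import cool
from CoolConvUtilities.AtlCoolLib import indirectOpen

filename = 'ALLP200.db'
dbSvc = cool.DatabaseSvcFactory.databaseService()
connectString = f'sqlite://;schema={filename};dbname=OFLP200'  # CONDBR3 for the data instance

# Try to re-open the existing sqlite file read-write so repeated runs add to it;
# only create a fresh database if it cannot be opened.
try:
    db = indirectOpen(connectString, readOnly=False)
except Exception as e:
    print(e)
    print("Couldn't open existing database, creating a new one")
    db = dbSvc.createDatabase(connectString)

# ... folder creation and record insertion as in makeTimingDB.py ...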