Commit 0b0c801f authored by FASER Reco

Pass arguments properly to ntuple maker

parent 50be09ff
2 merge requests: !378 Pass arguments properly to ntuple maker, !351 Merging Muon code to create new Database tag
Pipeline #6583842 passed
......@@ -46,6 +46,8 @@ parser.add_argument("--partial", action='store_true',
parser.add_argument("-c", "--cond", default="",
help="Specify global conditions tag (default: OFLCOND-FASER-04)")
parser.add_argument("--backward", action='store_true',
help="Use backward CKF tracks (default: forward)")
parser.add_argument("--trigFilt", action='store_true',
help="apply trigger event filter")
......@@ -53,6 +55,8 @@ parser.add_argument("--scintFilt", action='store_true',
help="apply scintillator event filter")
parser.add_argument("--NoTrackFilt", action='store_true',
help="Don't apply track event filter (default: do)")
parser.add_argument("--noStable", action='store_true',
help="Don't apply stable beam requirement (default: do)")
parser.add_argument("--unblind", action='store_true',
help="Don't apply signal blinding (default: do)")
......@@ -218,6 +222,8 @@ print(f"Scintillator Filter = {args.scintFilt}")
print(f"Track Filter = {not args.NoTrackFilt}")
print(f"Blind = {not args.unblind}")
print(f"OnlyBlinded = {args.onlyblind}")
print(f"Stable Beams = {not args.noStable}")
print(f"Backward = {args.backward}")
print(f"GRL = {args.grl}")
# OK, let's run the job here
......@@ -270,13 +276,21 @@ if args.genie:
if args.fluka:
mc_kwargs['UseFlukaWeights'] = True
# Use backward tracks for ntuple
# Default for this parameter is "CKFTrackCollectionWithoutIFT"
if args.backward:
if args.isMC:
mc_kwargs['TrackCollectionWithoutIFT'] = "CKFTrackCollectionBackwardWithoutIFT"
else:
grl_kwargs['TrackCollectionWithoutIFT'] = "CKFTrackCollectionBackwardWithoutIFT"
# algorithm
from NtupleDumper.NtupleDumperConfig import NtupleDumperAlgCfg
if args.isMC:
acc.merge(NtupleDumperAlgCfg(ConfigFlags, outfile, **mc_kwargs))
else:
acc.merge(NtupleDumperAlgCfg(ConfigFlags, outfile, DoBlinding=(not args.unblind), OnlyBlinded=args.onlyblind, DoScintFilter = args.scintFilt, DoTrackFilter = (not args.NoTrackFilt), DoTrigFilter = args.trigFilt, **grl_kwargs))
acc.merge(NtupleDumperAlgCfg(ConfigFlags, outfile, DoBlinding=(not args.unblind), OnlyBlinded=args.onlyblind, DoScintFilter = args.scintFilt, DoTrackFilter = (not args.NoTrackFilt), DoTrigFilter = args.trigFilt, StableOnly = (not args.noStable), **grl_kwargs) )
if not args.verbose:
from AthenaConfiguration.ComponentFactory import CompFactory
......
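
The job-options changes above add --backward and --noStable switches and route them into NtupleDumperAlgCfg. A minimal invocation sketch, assuming only the flag names shown in this diff and the --slice/--files/directory arguments used by the wrapper below; the run directory, slice number, and file count are placeholders, and the command is echoed rather than executed so the sketch is safe to run anywhere:

# Sketch only: flag names come from the argparse additions above; the path and
# counts are placeholders, not real inputs.
dir_path="/path/to/reco/run_directory"
extra_flags="--backward --noStable --trigFilt"
# Echo the command instead of running it; drop the echo to launch the job.
echo faser_ntuple_maker.py $extra_flags --slice 0 --files 5 "$dir_path"
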
......@@ -89,12 +89,48 @@ do
done
#
# Parse command-line options
dir_path=${1}
slice=${2}
nfiles=${3}
release_directory=${4}
working_directory=${5}
last_file=${6}
while [ -n "$1" ]
do
case "$1" in
--) # Stop parsing
shift; # Eat this
break;; # And stop parsing
-*) # Some option
break;; # Stop parsing
*) # Not an option, fill next value
if [ -z "$dir_path" ]
then
dir_path=${1}
shift
elif [ -z "$slice" ]
then
slice=${1}
shift
elif [ -z "$nfiles" ]
then
nfiles=${1}
shift
elif [ -z "$release_directory" ]
then
release_directory=${1}
shift
elif [ -z "$working_directory" ]
then
working_directory=${1}
shift
elif [ -z "$last_file" ]
then
last_file=${1}
shift
else
break # Already filled everything
fi
;;
esac
done
#
# Set defaults if arguments aren't provided
if [ -z "$dir_path" ]
......@@ -170,10 +206,12 @@ echo "Output: $output_directory"
echo "Starting: $starting_directory"
echo "job: $file_stem"
echo "Last: $last_file"
echo "Remaining: $@"
#
# Set up the release (do this automatically)?
# Must follow with -- to avoid command-line arguments being passed
export ATLAS_LOCAL_ROOT_BASE=/cvmfs/atlas.cern.ch/repo/ATLASLocalRootBase
source ${ATLAS_LOCAL_ROOT_BASE}/user/atlasLocalSetup.sh
source ${ATLAS_LOCAL_ROOT_BASE}/user/atlasLocalSetup.sh --
#
# Always go back to the starting directory in case paths are relative
cd "$starting_directory"
......@@ -183,7 +221,7 @@ cd "$release_directory"
asetup --input=calypso/asetup.faser Athena,22.0.49
source run/setup.sh
#
echo "Initial ATLAS_POOLCOND_PATH = $ATLAS_POOLCOND_PATH"
echo "ATLAS_POOLCOND_PATH = $ATLAS_POOLCOND_PATH"
#
# Check if there are data overrides
if [ -d "run/data/sqlite200" ]; then
......@@ -239,26 +277,12 @@ else
cp -r $cond_directory .
ls -R data
fi
#
# Further check if there is a pool conditions override
if [[ -d "data/poolcond" ]]; then
echo "Local POOL directory found!"
echo "Change ATLAS_POOLCOND_PATH"
echo " from $ATLAS_POOLCOND_PATH"
export ATLAS_POOLCOND_PATH=`pwd -P`/data
echo " to $ATLAS_POOLCOND_PATH"
else
echo "No local pool files found, use default:"
echo " $ATLAS_POOLCOND_PATH"
fi
echo "Final ATLAS_POOLCOND_PATH: $ATLAS_POOLCOND_PATH"
#
#
export EOS_MGM_URL=root://eospublic.cern.ch
#
# Run job
#
faser_ntuple_maker.py $last_file_str $partialstr $tagstr $ismc --slice $slice --files $nfiles $mergestr $flukastr $geniestr $unblindstr $dir_path
faser_ntuple_maker.py $last_file_str $partialstr $tagstr $ismc --slice $slice --files $nfiles $mergestr $flukastr $geniestr $unblindstr $dir_path "$@"
ntup_code=$?
echo "Return code: $ntup_code"
#
......
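
The wrapper change above replaces fixed positional assignment with a loop that fills the six positional arguments in order, stops at the first option (or an explicit --), and forwards whatever remains in "$@" straight to faser_ntuple_maker.py. A stand-alone sketch of that pattern, trimmed to three positionals, with an echo standing in for the real command:

# Stand-alone sketch (not part of the repository) of the argument handling
# introduced above: positionals are filled in order until the first option or a
# literal "--", and everything still left in "$@" is forwarded untouched.
dir_path=""
slice=""
nfiles=""
while [ -n "$1" ]
do
    case "$1" in
        --) shift; break;;      # explicit end of positionals
        -*) break;;             # first option: stop filling positionals
        *)  if [ -z "$dir_path" ]; then dir_path=$1; shift
            elif [ -z "$slice" ]; then slice=$1; shift
            elif [ -z "$nfiles" ]; then nfiles=$1; shift
            else break          # all positionals filled
            fi;;
    esac
done
echo "dir_path=$dir_path slice=$slice nfiles=$nfiles remaining: $@"

Invoked as, say, sh sketch.sh /eos/rec 3 10 --backward --noStable (the file name is hypothetical), it prints the three positionals and leaves the two flags in "$@", mirroring the "Remaining: $@" echo and the trailing "$@" on the faser_ntuple_maker.py line in the diff above.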