diff --git a/PhysicsAnalysis/NeutrinoSearch/src/NeutrinoSearchAlg.cxx b/PhysicsAnalysis/NeutrinoSearch/src/NeutrinoSearchAlg.cxx
index e3d33d683e0b47d064216515cf83fb280cb9edd4..c353833fc5d100423c0a97c778cd5243db289184 100644
--- a/PhysicsAnalysis/NeutrinoSearch/src/NeutrinoSearchAlg.cxx
+++ b/PhysicsAnalysis/NeutrinoSearch/src/NeutrinoSearchAlg.cxx
@@ -750,4 +750,4 @@ NeutrinoSearchAlg::clearTree() const
   m_syVetoNu = 0;
   m_thetaxVetoNu = 0;
   m_thetayVetoNu = 0;
-}
\ No newline at end of file
+}
diff --git a/PhysicsAnalysis/NtupleDumper/CMakeLists.txt b/PhysicsAnalysis/NtupleDumper/CMakeLists.txt
index dc2dae3126aaa00283775626da1b29bbb0dd3ac3..fab78641d7ae2a3842904122968a80170bca9a28 100644
--- a/PhysicsAnalysis/NtupleDumper/CMakeLists.txt
+++ b/PhysicsAnalysis/NtupleDumper/CMakeLists.txt
@@ -1,4 +1,4 @@
-atlas_subdir(NtupleDumper)
+atlas_subdir( NtupleDumper )
 
 atlas_add_component(
         NtupleDumper
@@ -8,5 +8,5 @@ atlas_add_component(
         LINK_LIBRARIES AthenaBaseComps StoreGateLib xAODFaserWaveform xAODFaserCalorimeter xAODFaserTrigger xAODFaserLHC ScintIdentifier FaserCaloIdentifier GeneratorObjects FaserActsGeometryLib TrackerSimEvent TrackerSimData TrackerIdentifier TrackerReadoutGeometry TrkTrack GeoPrimitives TrackerRIO_OnTrack TrackerSpacePoint FaserActsKalmanFilterLib
 )
 
-atlas_install_python_modules(python/*.py)
-atlas_install_scripts(scripts/*.py)
+atlas_install_python_modules( python/*.py )
+atlas_install_scripts( scripts/*.py scripts/*.sh )
diff --git a/PhysicsAnalysis/NtupleDumper/python/NtupleDumperConfig.py b/PhysicsAnalysis/NtupleDumper/python/NtupleDumperConfig.py
index 5bc52c41df03c934465f97658a39aeb342350631..da44697fc23f3318f599d4451f03fcc196a3f463 100644
--- a/PhysicsAnalysis/NtupleDumper/python/NtupleDumperConfig.py
+++ b/PhysicsAnalysis/NtupleDumper/python/NtupleDumperConfig.py
@@ -2,103 +2,33 @@
     Copyright (C) 2002-2022 CERN for the benefit of the ATLAS collaboration
 """
 
-from AthenaConfiguration.ComponentAccumulator import ComponentAccumulator
 from AthenaConfiguration.ComponentFactory import CompFactory
 from MagFieldServices.MagFieldServicesConfig import MagneticFieldSvcCfg
+from FaserActsGeometry.ActsGeometryConfig import ActsTrackingGeometryToolCfg
 
-def NtupleDumperAlgCfg(flags, **kwargs):
+def NtupleDumperAlgCfg(flags, OutName, **kwargs):
     # Initialize GeoModel
     from FaserGeoModel.FaserGeoModelConfig import FaserGeometryCfg
     acc = FaserGeometryCfg(flags)
 
     acc.merge(MagneticFieldSvcCfg(flags))
-    # acc.merge(FaserActsTrackingGeometrySvcCfg(flags))
-    # acc.merge(FaserActsAlignmentCondAlgCfg(flags))
+    #acc.merge(ActsTrackingGeometrySvcCfg(flags))
+    #acc.merge(FaserActsAlignmentCondAlgCfg(flags))
+
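+    # ActsTrackingGeometryToolCfg returns a (ComponentAccumulator, tool) pair:
+    # merge the accumulator into acc and pass the tool to the extrapolation tool below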
+    result, actsTrackingGeometryTool = ActsTrackingGeometryToolCfg(flags)
+    acc.merge(result)
 
     actsExtrapolationTool = CompFactory.FaserActsExtrapolationTool("FaserActsExtrapolationTool")
-    actsExtrapolationTool.MaxSteps = 1000
-    actsExtrapolationTool.TrackingGeometryTool = CompFactory.FaserActsTrackingGeometryTool("TrackingGeometryTool")
+    actsExtrapolationTool.MaxSteps = 10000
+    actsExtrapolationTool.TrackingGeometryTool = actsTrackingGeometryTool 
 
     NtupleDumperAlg = CompFactory.NtupleDumperAlg("NtupleDumperAlg",**kwargs)
     NtupleDumperAlg.ExtrapolationTool = actsExtrapolationTool
     acc.addEventAlgo(NtupleDumperAlg)
 
     thistSvc = CompFactory.THistSvc()
-    thistSvc.Output += ["HIST2 DATAFILE='Data-tuple.root' OPT='RECREATE'"]
+    thistSvc.Output += [f"HIST2 DATAFILE='{OutName}' OPT='RECREATE'"]
     acc.addService(thistSvc)
 
     return acc
 
-if __name__ == "__main__":
-
-    import sys
-    from AthenaCommon.Logging import log, logging
-    from AthenaCommon.Constants import DEBUG, VERBOSE, INFO
-    from AthenaCommon.Configurable import Configurable
-    from CalypsoConfiguration.AllConfigFlags import ConfigFlags
-    from AthenaConfiguration.TestDefaults import defaultTestFiles
-    from CalypsoConfiguration.MainServicesConfig import MainServicesCfg
-    from AthenaPoolCnvSvc.PoolReadConfig import PoolReadCfg
-    # from OutputStreamAthenaPool.OutputStreamConfig import OutputStreamCfg
-
-    # Set up logging and new style config
-    log.setLevel(INFO)
-    Configurable.configurableRun3Behavior = True
-
-    # Configure
-    ConfigFlags.Input.Files = [
-          '/eos/experiment/faser/rec/2022/p0008//008119/Faser-Physics-008119-00168-p0008-xAOD.root',
-
-
-    ]
-    ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-02"             # Always needed; must match FaserVersionS
-    ConfigFlags.IOVDb.DatabaseInstance = "OFLP200"               # Use MC conditions for now
-    ConfigFlags.Input.ProjectName = "data21"                     # Needed to bypass autoconfig
-    ConfigFlags.Input.isMC = False                                # Needed to bypass autoconfig
-    ConfigFlags.GeoModel.FaserVersion     = "FASERNU-03"           # FASER geometry
-    ConfigFlags.Common.isOnline = False
-    ConfigFlags.GeoModel.Align.Dynamic = False
-    ConfigFlags.Beam.NumberOfCollisions = 0.
-
-    ConfigFlags.Detector.GeometryFaserSCT = True
-
-    ConfigFlags.lock()
-
-    # Core components
-    acc = MainServicesCfg(ConfigFlags)
-    acc.merge(PoolReadCfg(ConfigFlags))
-
-    # algorithm
-    acc.merge(NtupleDumperAlgCfg(ConfigFlags, UseFlukaWeights=True))
-
-    # silencio
-    AthenaEventLoopMgr = CompFactory.AthenaEventLoopMgr()
-    AthenaEventLoopMgr.EventPrintoutInterval=500
-    acc.addService(AthenaEventLoopMgr)
-
-    # # Hack to avoid problem with our use of MC databases when isMC = False
-    replicaSvc = acc.getService("DBReplicaSvc")
-    replicaSvc.COOLSQLiteVetoPattern = ""
-    replicaSvc.UseCOOLSQLite = True
-    replicaSvc.UseCOOLFrontier = False
-    replicaSvc.UseGeomSQLite = True
-
-    # Timing
-    #acc.merge(MergeRecoTimingObjCfg(ConfigFlags))
-
-    # Dump config
-    # logging.getLogger('forcomps').setLevel(VERBOSE)
-    # acc.foreach_component("*").OutputLevel = VERBOSE
-    # acc.foreach_component("*ClassID*").OutputLevel = INFO
-    # acc.getCondAlgo("FaserSCT_AlignCondAlg").OutputLevel = VERBOSE
-    # acc.getCondAlgo("FaserSCT_DetectorElementCondAlg").OutputLevel = VERBOSE
-    # acc.getService("StoreGateSvc").Dump = True
-    # acc.getService("ConditionStore").Dump = True
-    # acc.printConfig(withDetails=True)
-    # ConfigFlags.dump()
-
-    # Execute and finish
-    sc = acc.run(maxEvents=-1)
-
-    # Success should be 0
-    sys.exit(not sc.isSuccess())    
diff --git a/PhysicsAnalysis/NtupleDumper/scripts/analyzeRun.py b/PhysicsAnalysis/NtupleDumper/scripts/analyzeRun.py
index eaca9c5b1db2cff5fe7a5e326f1d35148d4041e6..8cb79460633e7cf16ba5818a43206c6dbe7c20ab 100755
--- a/PhysicsAnalysis/NtupleDumper/scripts/analyzeRun.py
+++ b/PhysicsAnalysis/NtupleDumper/scripts/analyzeRun.py
@@ -6,38 +6,9 @@
 
 from AthenaConfiguration.ComponentAccumulator import ComponentAccumulator
 from AthenaConfiguration.ComponentFactory import CompFactory
-from MagFieldServices.MagFieldServicesConfig import MagneticFieldSvcCfg
 
-from FaserActsGeometry.ActsGeometryConfig import ActsTrackingGeometryToolCfg
-# from FaserActsGeometry.ActsGeometryConfig import ActsTrackingGeometrySvcCfg
-# from FaserActsGeometry.ActsGeometryConfig import ActsExtrapolationToolCfg
-
-
-def NtupleDumperAlgCfg(flags, OutName, **kwargs):
-    # Initialize GeoModel
-    from FaserGeoModel.FaserGeoModelConfig import FaserGeometryCfg
-    acc = FaserGeometryCfg(flags)
-
-    acc.merge(MagneticFieldSvcCfg(flags))
-    #acc.merge(ActsTrackingGeometrySvcCfg(flags))
-    #acc.merge(FaserActsAlignmentCondAlgCfg(flags))
-
-    result, actsTrackingGeometryTool = ActsTrackingGeometryToolCfg(flags)
-    acc.merge(result)
-
-    actsExtrapolationTool = CompFactory.FaserActsExtrapolationTool("FaserActsExtrapolationTool")
-    actsExtrapolationTool.MaxSteps = 10000
-    actsExtrapolationTool.TrackingGeometryTool = actsTrackingGeometryTool 
-
-    NtupleDumperAlg = CompFactory.NtupleDumperAlg("NtupleDumperAlg",**kwargs)
-    NtupleDumperAlg.ExtrapolationTool = actsExtrapolationTool
-    acc.addEventAlgo(NtupleDumperAlg)
-
-    thistSvc = CompFactory.THistSvc()
-    thistSvc.Output += [f"HIST2 DATAFILE='{OutName}' OPT='RECREATE'"]
-    acc.addService(thistSvc)
-
-    return acc
+# NtupleDumperAlgCfg has been moved to python/NtupleDumperConfig.py
+from NtupleDumper.NtupleDumperConfig import NtupleDumperAlgCfg
 
 if __name__ == "__main__":
 
diff --git a/PhysicsAnalysis/NtupleDumper/scripts/faser_ntuple_maker.py b/PhysicsAnalysis/NtupleDumper/scripts/faser_ntuple_maker.py
new file mode 100755
index 0000000000000000000000000000000000000000..0f725fe9a5a4bf2dc63b915ccf9768b1ffd60c0f
--- /dev/null
+++ b/PhysicsAnalysis/NtupleDumper/scripts/faser_ntuple_maker.py
@@ -0,0 +1,220 @@
+#!/usr/bin/env python
+# 
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration 
+#
+# Run with
+# faser_ntuple_maker.py [options] path
+#
+# Options:
+#   --isMC - needed when running over digitised MC rather than data
+#
+import sys
+import time
+import argparse
+
+a = time.time()
+
+parser = argparse.ArgumentParser(description="Run PHYS ntuple production")
+
+parser.add_argument("path",
+                    help="Fully qualified input path (directory or file)")
+
+parser.add_argument("--slice", type=int, default=0,
+                    help="Specify ordinal output file to produce")
+parser.add_argument("--files", type=int, default=1,
+                    help="Specify files per slice")
+parser.add_argument("--last", type=int, default=0,
+                    help="Specify last file in slice (normally --files)")
+
+parser.add_argument("--outfile", default="",
+                    help="Override output file name")
+                    
+parser.add_argument("-v", "--verbose", action='store_true', 
+                    help="Turn on DEBUG output")
+parser.add_argument("-n", "--nevents", type=int, default=-1,
+                    help="Specify number of events to process (default: all)")
+
+parser.add_argument("-t", "--tag", default="",
+                    help="Specify tag (to append to output filename)")
+
+parser.add_argument("--isMC", action='store_true',
+                    help="Running on digitised MC rather than data")
+parser.add_argument("--partial", action='store_true',
+                    help="Allow partial input files")
+ 
+args = parser.parse_args()
+
+from pathlib import Path
+
+# Must figure out what we are doing here
+filepath = Path(args.path)
+
+filelist = []
+# If this is a directory, need to create file list
+if filepath.is_dir():
+
+    # Parsing MC is tricky
+    if args.isMC:
+        print("Monte Carlo not supported yet!")
+        sys.exit(0)
+
+    # Use expected data pattern to find files
+    runstr = filepath.stem
+    start = args.slice * args.files
+    if args.last > 0:
+        end = start + args.last
+    else:
+        end = start + args.files
+
+    for seg in range(start, end):
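+        # Expected data file pattern, e.g. Faser-Physics-008119-00168-p0008-xAOD.root
+        # (run number from the directory name, 5-digit file segment, optional production tag)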
+        searchstr = f"Faser-Physics-{runstr}-{seg:05d}-*xAOD.root"
+        flist = list(filepath.glob(searchstr))
+        if len(flist) == 0:
+            print(f"Didn't find file {searchstr}!")
+            if args.partial: continue
+            sys.exit(1)
+
+        elif len(flist) > 1:
+            print(f"Found multiple matches for {searchstr}!")
+            print(flist)
+            sys.exit(1)
+
+        filestr = str(flist[0].resolve())
+        # Use proper EOS file path here?
+        if filestr[:4] == '/eos':
+            filestr = f"root://eospublic.cern.ch/{filestr}"
+        filelist.append(filestr)
+    # End of loop over segments
+
+    firstfile = Path(filelist[0])
+    firststem = str(firstfile.stem)
+    firstseg = firststem.split('-')[3]
+
+    lastfile = Path(filelist[-1])
+    laststem = str(lastfile.stem)
+    lastseg = laststem.split('-')[3]
+
+    # Find any tags
+    tagstr = firststem.replace(f"Faser-Physics-{runstr}-{firstseg}", "")
+    tagstr = tagstr.replace("-xAOD", "")
+    print(f"Tag = {tagstr}")
+
+    # Build output name
+    outfile = f"Faser-Physics-{runstr}-{firstseg}-{lastseg}"
+
+    # This will include the leading -
+    if len(tagstr) > 0:
+        outfile += f"{tagstr}"
+
+    if len(args.tag) > 0 and args.tag not in tagstr:
+        outfile += f"-{args.tag}"
+
+    outfile += "-PHYS.root"
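+    # Final name, e.g. Faser-Physics-008119-00000-00009-p0008-PHYS.root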
+
+# If this is a single file, just process that
+# Could be a url, so don't check if this is a file
+else:
+    filelist.append(args.path)
+
+    # Build output name
+    filestem = str(filepath.stem)
+
+    # Remove -xAOD from end
+    if filestem[-5:] == "-xAOD":
+        filestem = filestem[:-5]
+
+    # Do we want to add a tag?
+    if len(args.tag) > 0 and args.tag not in filestem:
+        filestem += f"-{args.tag}"
+
+    outfile = f"{filestem}-PHYS.root"
+
+# Print out what we found
+if len(filelist) == 0:
+    print(f"Found no files for {args.path}!")
+    sys.exit(1)
+elif len(filelist) == 1:
+    print("Processing file:")
+else:
+    print("Processing files:")
+for f in filelist:
+    print(f)
+
+# Override output file?
+if len(args.outfile) > 0:
+    outfile = args.outfile
+
+print("Output file:")
+print(outfile)
+
+# OK, let's run the job here
+from AthenaCommon.Logging import log, logging
+from AthenaCommon.Constants import DEBUG, VERBOSE, INFO
+from AthenaCommon.Configurable import Configurable
+from CalypsoConfiguration.AllConfigFlags import ConfigFlags
+
+from CalypsoConfiguration.MainServicesConfig import MainServicesCfg
+from AthenaPoolCnvSvc.PoolReadConfig import PoolReadCfg
+
+Configurable.configurableRun3Behavior = True
+
+# Configure
+ConfigFlags.Input.Files = filelist
+ConfigFlags.IOVDb.GlobalTag = "OFLCOND-FASER-03"         # Always needed; must match FaserVersion
+if args.isMC:
+    ConfigFlags.IOVDb.DatabaseInstance = "OFLP200"           # Use MC conditions 
+    ConfigFlags.Input.isMC = True                            # Needed to bypass autoconfig
+else:
+    ConfigFlags.IOVDb.DatabaseInstance = "CONDBR3"           # Use data conditions
+    ConfigFlags.Input.isMC = False                           # Needed to bypass autoconfig
+
+ConfigFlags.Input.ProjectName = "data21"                     # Needed to bypass autoconfig
+
+ConfigFlags.GeoModel.FaserVersion   = "FASERNU-03"           # FASER geometry
+ConfigFlags.Common.isOnline = False
+ConfigFlags.GeoModel.Align.Dynamic = False
+ConfigFlags.Beam.NumberOfCollisions = 0.
+ConfigFlags.Detector.GeometryFaserSCT = True
+ConfigFlags.lock()
+
+# Core components
+acc = MainServicesCfg(ConfigFlags)
+acc.merge(PoolReadCfg(ConfigFlags))
+
+# algorithm
+from NtupleDumper.NtupleDumperConfig import NtupleDumperAlgCfg
+if args.isMC:
+    acc.merge(NtupleDumperAlgCfg(ConfigFlags, outfile, UseFlukaWeights=True))
+else:
+    acc.merge(NtupleDumperAlgCfg(ConfigFlags, outfile))
+
+from AthenaConfiguration.ComponentFactory import CompFactory
+AthenaEventLoopMgr = CompFactory.AthenaEventLoopMgr()
+AthenaEventLoopMgr.EventPrintoutInterval=1000
+acc.addService(AthenaEventLoopMgr)
+
+# Hack to avoid problem with our use of MC databases when isMC = False
+if not args.isMC:
+    replicaSvc = acc.getService("DBReplicaSvc")
+    replicaSvc.COOLSQLiteVetoPattern = ""
+    replicaSvc.UseCOOLSQLite = True
+    replicaSvc.UseCOOLFrontier = False
+    replicaSvc.UseGeomSQLite = True
+
+if args.verbose:
+    log.setLevel(VERBOSE)
+    acc.printConfig(withDetails=True)
+    ConfigFlags.dump()
+else:
+    log.setLevel(INFO)
+
+acc.getService("MessageSvc").Format = "% F%40W%S%7W%R%T %0W%M"
+
+# Execute and finish
+sc = acc.run(maxEvents=args.nevents)
+
+b = time.time()
+log.info(f"Finish execution in {b-a} seconds")
+
+# Success should be 0
+sys.exit(int(sc.isFailure()))
diff --git a/PhysicsAnalysis/NtupleDumper/scripts/submit_faser_ntuple_maker.sh b/PhysicsAnalysis/NtupleDumper/scripts/submit_faser_ntuple_maker.sh
new file mode 100755
index 0000000000000000000000000000000000000000..d32998724cb479aeff240dee489257b9f1a0f3fd
--- /dev/null
+++ b/PhysicsAnalysis/NtupleDumper/scripts/submit_faser_ntuple_maker.sh
@@ -0,0 +1,252 @@
+#!/bin/bash
+#
+# Used with a condor file to submit a physics ntuple job
+#
+# Usage:
+# submit_faser_ntuple_maker.sh dirpath slice nfiles [release_directory] [working_directory] [lastfile]
+#
+# Options:
+#   --out - specify output location (in EOS) to copy output ntuple (PHYS) file
+#   --log - specify output location (in EOS) for log file
+#   --isMC - needed for MC input
+#   --partial - allow missing files
+#
+# dirpath - full directory path to xAOD files
+# slice - ordinal output file number
+# nfiles - number of xAOD files to process per slice
+# release_directory - optional path to release install directory (default pwd)
+# working_directory - optional path to output directory location (default pwd)
+# lastfile - number of files in the last (possibly partial) slice (default 0 = use nfiles)
+#
+# The release directory must already be set up 
+# (so an unqualified asetup can set up the release properly)
+#
+#----------------------------------------
+# Keep track of time
+SECONDS=0
+#
+# Defaults
+ismc=""
+partialstr=""
+#
+# Parse command-line options
+while [ -n "$1" ]
+do 
+  case "$1" in
+      -l | --log)
+	  logdest="$2";
+	  shift;
+	  shift;; # Must eat 2 options here
+
+      -o | --out)
+	  outdest="$2";
+	  shift;
+	  shift;;
+
+      --isMC)
+	  ismc="--isMC"
+	  shift;;
+
+      --partial)
+	  echo "Allowing partial merge"
+	  partialstr="--partial"
+	  shift;;
+
+      --) # End of options
+	  shift; # Eat this
+	  break;; # And stop parsing
+
+    -*) 
+	  echo "Unknown option $1"
+	  shift;;
+
+    *) break;;  # Not an option, don't shift
+  esac
+done
+#
+# Parse positional arguments
+dir_path=${1}
+slice=${2}
+nfiles=${3}
+release_directory=${4}
+working_directory=${5}
+last_file=${6}
+#
+# Set defaults if arguments aren't provided
+if [ -z "$dir_path" ]
+then
+  echo "No directory specified!"
+  echo "Usage: submit_faser_ntuple_maker.sh directory slice nfiles [release dir] [output dir]"
+  exit 1
+fi
+#
+if [ -z "$slice" ]
+then
+  echo "Slice number not specified!"
+  echo "Usage: submit_faser_ntuple_maker.sh directory slice nfiles [release dir] [output dir]"
+  exit 1
+fi
+#
+if [ -z "$nfiles" ]
+then
+  echo "Files per slice not specified!"
+  echo "Usage: submit_faser_ntuple_maker.sh directory slice nfiles [release dir] [output dir]"
+  exit 1
+fi
+#
+if [ -z "$release_directory" ]
+then
+  release_directory=`pwd`
+fi
+#
+if [ -z "$working_directory" ]
+then
+  working_directory=`pwd`
+fi
+#
+starting_directory=`pwd`
+#
+# Now extract the run number and file stem
+#
+# First, get an example filename
+file_name=`ls -1 $dir_path | head -1`
+# 
+# Now split based on '.' to get stem
+defaultIFS=$IFS
+IFS='.'
+read file_stem ext <<< "$file_name"
+#
+# Finally extract the run number
+IFS='-'
+# Read the split words into an array based on delimiter
+read faser short run_number segment <<< "$file_stem"
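+# e.g. file_stem "Faser-Physics-008119-00168" gives faser=Faser, short=Physics, run_number=008119, segment=00168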
+#
+# Set the IFS delimiter back, or else echo doesn't work...
+IFS=$defaultIFS
+#
+# Make output directory if needed
+output_directory="$working_directory/$run_number"
+mkdir -p "$output_directory"
+#
+# Need to make up an output name
+printf -v slicestr "%04d" $slice
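+# Zero-pad the slice number to 4 digits, e.g. slice 3 -> "0003"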
+#
+file_stem="$faser-$short-$run_number-$slicestr-PHYS"
+#
+# This magic redirects everything in this script to our log file
+logfile="${file_stem}.ntp.log"
+exec >& "$output_directory/$logfile"
+echo `date` - $HOSTNAME
+echo "Directory: $dir_path"
+echo "Slice: $slice"
+echo "NFiles: $nfiles"
+echo "Release: $release_directory"
+echo "Output: $output_directory"
+echo "Starting: $starting_directory"
+echo "job: $file_stem"
+echo "Last: $last_file"
+#
+# Set up the release (do this automatically)?
+export ATLAS_LOCAL_ROOT_BASE=/cvmfs/atlas.cern.ch/repo/ATLASLocalRootBase
+source ${ATLAS_LOCAL_ROOT_BASE}/user/atlasLocalSetup.sh 
+#
+# Always go back to the starting directory in case paths are relative
+cd "$starting_directory"
+cd "$release_directory"
+#
+# Do this by hand
+asetup --input=calypso/asetup.faser Athena,22.0.49
+source run/setup.sh
+#
+echo "ATLAS_POOLCOND_PATH = $ATLAS_POOLCOND_PATH"
+#
+# Check if there are data overrides
+if [ -d "run/data/sqlite200" ]; then
+   cond_directory=`pwd -P`/run/data  # Get absolute path
+fi
+#
+# Try to find a release tag
+cd calypso
+recotag=`git describe --tags`
+if [[ "$recotag" == "reco/r"???? ]]; then
+  tag=`echo "$recotag" | cut -c 6-11`
+  echo "Found reco tag: $tag"
+fi
+if [[ "$recotag" == "digi/d"???? ]]; then
+  tag=`echo "$recotag" | cut -c 6-11`
+  echo "Found digi tag: $tag"
+fi
+if [[ "$recotag" == "sim/s"???? ]]; then
+  tag=`echo "$recotag" | cut -c 5-10`
+  echo "Found sim tag: $tag"
+fi
+#
+if [[ -z "$tag" ]]; then
+    tagstr=""
+else
+    tagstr="--tag $tag"
+fi
+#
+if [[ -z "$last_file" ]]; then
+    last_file_str=""
+else
+    last_file_str="--last $last_file"
+fi
+#
+# Move to the run directory
+cd "$starting_directory"
+cd "$output_directory"
+#
+# Make run directory
+if [[ -e "$file_stem" ]]; then
+    echo "Directory $file_stem already exists"
+else
+    mkdir "$file_stem"
+fi
+cd "$file_stem"
+#
+# Check if there are data overrides in the release directory
+if [[ -z "$cond_directory" ]]; then
+    echo "No local conditions directory found!"
+else
+    echo "Local conditions directory found!  Copying to run directory..."
+    echo Copying $cond_directory
+    cp -r $cond_directory .
+    ls -R data
+fi
+#
+export EOS_MGM_URL=root://eospublic.cern.ch
+#
+# Run job
+#
+faser_ntuple_maker.py $last_file_str $partialstr $tagstr $ismc --slice $slice --files $nfiles $dir_path
+#
+# Print out ending time
+date
+echo "Job finished after $SECONDS seconds"
+# 
+# Copy output to EOS if desired
+if ! [ -z "$outdest" ]
+then
+    ls -l
+    echo "copy *-PHYS.root to $outdest"
+    mkdir -p $outdest
+    eos cp *-PHYS.root ${outdest}/ || true
+fi
+#
+# Also copy log file
+if ! [ -z "$logdest" ]
+then
+    cd ..
+    ls -l
+    echo "copy $logfile to $logdest"
+    mkdir -p $logdest
+    eos cp $logfile $logdest/$logfile
+elif ! [ -z "$outdest" ]
+then 
+    cd ..
+    ls -l
+    echo "copy $logfile to $outdest"
+    mkdir -p $outdest
+    eos cp $logfile $outdest/$logfile
+fi
diff --git a/Tracker/TrackerRecAlgs/MyExtrapolationExample/src/MyExtrapolationExample.cxx b/Tracker/TrackerRecAlgs/MyExtrapolationExample/src/MyExtrapolationExample.cxx
index aecebc372e415c6c58f1464706c6f25ea6001dc2..a194e065b23c83dc4b345a73a50f1ec499a95916 100644
--- a/Tracker/TrackerRecAlgs/MyExtrapolationExample/src/MyExtrapolationExample.cxx
+++ b/Tracker/TrackerRecAlgs/MyExtrapolationExample/src/MyExtrapolationExample.cxx
@@ -23,6 +23,8 @@ StatusCode MyExtrapolationExample::initialize() {
 }
 
 StatusCode MyExtrapolationExample::execute(const EventContext &ctx) const {
+  // The following uses the nominal geometry (without alignment)
+  // To use the aligned version, use getGeometryContext() instead
   const Acts::GeometryContext gctx =
       m_extrapolationTool->trackingGeometryTool()->getNominalGeometryContext().context();
 
diff --git a/Tracking/Acts/FaserActsKalmanFilter/src/ActsTrackSeedTool.cxx b/Tracking/Acts/FaserActsKalmanFilter/src/ActsTrackSeedTool.cxx
index 4161c19eecbc6b30b0436732f76498f71f3f4fa6..817fcfea171e76d70d75f49a540e0604f2f52984 100644
--- a/Tracking/Acts/FaserActsKalmanFilter/src/ActsTrackSeedTool.cxx
+++ b/Tracking/Acts/FaserActsKalmanFilter/src/ActsTrackSeedTool.cxx
@@ -36,7 +36,7 @@ StatusCode ActsTrackSeedTool::run(std::vector<int> /*maskedLayers*/) {
 
   using IdentifierMap = std::map<Identifier, Acts::GeometryIdentifier>;
   std::shared_ptr<IdentifierMap> identifierMap = m_trackingGeometryTool->getIdentifierMap();
-  const FaserActsGeometryContext& gctx = m_trackingGeometryTool->getNominalGeometryContext();
+  const FaserActsGeometryContext& gctx = m_trackingGeometryTool->getGeometryContext();
   Acts::GeometryContext geoctx = gctx.context();
 
   const int kSize = 1;
diff --git a/Tracking/Acts/FaserActsKalmanFilter/src/CKF2.cxx b/Tracking/Acts/FaserActsKalmanFilter/src/CKF2.cxx
index 858b97d6b2655859d77f027b31a57ce5782b95ae..3647ac98abdd9e5176fb0902fb368e89dc741dcb 100644
--- a/Tracking/Acts/FaserActsKalmanFilter/src/CKF2.cxx
+++ b/Tracking/Acts/FaserActsKalmanFilter/src/CKF2.cxx
@@ -84,7 +84,7 @@ StatusCode CKF2::execute() {
   std::shared_ptr<const Acts::TrackingGeometry> trackingGeometry
       = m_trackingGeometryTool->trackingGeometry();
 
-  const FaserActsGeometryContext& faserActsGeometryContext = m_trackingGeometryTool->getNominalGeometryContext();
+  const FaserActsGeometryContext& faserActsGeometryContext = m_trackingGeometryTool->getGeometryContext();
   auto gctx = faserActsGeometryContext.context();
   Acts::MagneticFieldContext magFieldContext = getMagneticFieldContext(ctx);
   Acts::CalibrationContext calibContext;
diff --git a/Tracking/Acts/FaserActsKalmanFilter/src/CombinatorialKalmanFilterAlg.cxx b/Tracking/Acts/FaserActsKalmanFilter/src/CombinatorialKalmanFilterAlg.cxx
index ee2a864b51d0cd632322128256666679c4beb490..dcb167496790d2b5af143b55b86cc4698f902976 100644
--- a/Tracking/Acts/FaserActsKalmanFilter/src/CombinatorialKalmanFilterAlg.cxx
+++ b/Tracking/Acts/FaserActsKalmanFilter/src/CombinatorialKalmanFilterAlg.cxx
@@ -73,7 +73,7 @@ StatusCode CombinatorialKalmanFilterAlg::execute() {
   std::shared_ptr<const Acts::TrackingGeometry> trackingGeometry
       = m_trackingGeometryTool->trackingGeometry();
 
-  const FaserActsGeometryContext& gctx = m_trackingGeometryTool->getNominalGeometryContext();
+  const FaserActsGeometryContext& gctx = m_trackingGeometryTool->getGeometryContext();
   auto geoctx = gctx.context();
   Acts::MagneticFieldContext magFieldContext = getMagneticFieldContext(ctx);
   Acts::CalibrationContext calibContext;
diff --git a/Tracking/Acts/FaserActsKalmanFilter/src/FaserActsKalmanFilterAlg.cxx b/Tracking/Acts/FaserActsKalmanFilter/src/FaserActsKalmanFilterAlg.cxx
index f88b4e15f59fc7e7d3a4b434c8328942982a67ba..5dd831f9592b5323280d5cc09e10798fa342966f 100755
--- a/Tracking/Acts/FaserActsKalmanFilter/src/FaserActsKalmanFilterAlg.cxx
+++ b/Tracking/Acts/FaserActsKalmanFilter/src/FaserActsKalmanFilterAlg.cxx
@@ -109,7 +109,7 @@ StatusCode FaserActsKalmanFilterAlg::execute() {
   std::shared_ptr<const Acts::TrackingGeometry> trackingGeometry
       = m_trackingGeometryTool->trackingGeometry();
 
-  const FaserActsGeometryContext& gctx = m_trackingGeometryTool->getNominalGeometryContext();
+  const FaserActsGeometryContext& gctx = m_trackingGeometryTool->getGeometryContext();
   auto geoctx = gctx.context();
   Acts::MagneticFieldContext magctx = getMagneticFieldContext(ctx);
   Acts::CalibrationContext calctx;
diff --git a/Tracking/Acts/FaserActsKalmanFilter/src/TruthSeededTrackFinderTool.cxx b/Tracking/Acts/FaserActsKalmanFilter/src/TruthSeededTrackFinderTool.cxx
index f0d9796f1a16ce52aee2bf52dbc11ec28c96a7a5..7f9f427960ba221d8fd3f489cbb0e38d627ffc9f 100644
--- a/Tracking/Acts/FaserActsKalmanFilter/src/TruthSeededTrackFinderTool.cxx
+++ b/Tracking/Acts/FaserActsKalmanFilter/src/TruthSeededTrackFinderTool.cxx
@@ -46,7 +46,7 @@ StatusCode TruthSeededTrackFinderTool::run() {
   SG::ReadHandle<SpacePointForSeedCollection> spacePointCollection {m_spacePointCollectionKey};
   ATH_CHECK(spacePointCollection.isValid());
 
-  Acts::GeometryContext geoctx = m_trackingGeometryTool->getNominalGeometryContext().context();
+  Acts::GeometryContext geoctx = m_trackingGeometryTool->getGeometryContext().context();
 
   std::shared_ptr<const Acts::TrackingGeometry> trackingGeometry
       = m_trackingGeometryTool->trackingGeometry();
diff --git a/Tracking/Acts/FaserActsKalmanFilter/src/TruthTrackFinderTool.cxx b/Tracking/Acts/FaserActsKalmanFilter/src/TruthTrackFinderTool.cxx
index bfd5e9b740bbeb53529fc2c6e1048656187e1d5a..672be6fa934ead4725a43d81bc2f5e156612449e 100644
--- a/Tracking/Acts/FaserActsKalmanFilter/src/TruthTrackFinderTool.cxx
+++ b/Tracking/Acts/FaserActsKalmanFilter/src/TruthTrackFinderTool.cxx
@@ -39,7 +39,7 @@ StatusCode TruthTrackFinderTool::run() {
 
   using IdentifierMap = std::map<Identifier, Acts::GeometryIdentifier>;
   std::shared_ptr<IdentifierMap> identifierMap = m_trackingGeometryTool->getIdentifierMap();
-  Acts::GeometryContext gctx = m_trackingGeometryTool->getNominalGeometryContext().context();
+  Acts::GeometryContext gctx = m_trackingGeometryTool->getGeometryContext().context();
   const int kSize = 2;
   using ParametersVector = Acts::ActsVector<kSize>;
   using CovarianceMatrix = Acts::ActsSymMatrix<kSize>;
@@ -188,4 +188,4 @@ StatusCode TruthTrackFinderTool::run() {
 
 StatusCode TruthTrackFinderTool::finalize() {
   return StatusCode::SUCCESS;
-}
\ No newline at end of file
+}