diff --git a/PhysicsAnalysis/NtupleDumper/scripts/faser_ntuple_maker.py b/PhysicsAnalysis/NtupleDumper/scripts/faser_ntuple_maker.py
index 8020e7ba12d265ecc715b5bab2d9dfa404a9b2b3..4bbd8c04239641ee8062efded999750ae3a65592 100755
--- a/PhysicsAnalysis/NtupleDumper/scripts/faser_ntuple_maker.py
+++ b/PhysicsAnalysis/NtupleDumper/scripts/faser_ntuple_maker.py
@@ -46,6 +46,8 @@ parser.add_argument("--partial", action='store_true',
 
 parser.add_argument("-c", "--cond", default="",
                     help="Specify global conditions tag (default: OFLCOND-FASER-04)")
+parser.add_argument("--backward", action='store_true',
+                    help="Use backward CKF tracks (default: forward)")
 
 parser.add_argument("--trigFilt", action='store_true',
                     help="apply trigger event filter")
@@ -53,7 +55,7 @@ parser.add_argument("--scintFilt", action='store_true',
                     help="apply scintillator event filter")
 parser.add_argument("--NoTrackFilt", action='store_true',
                     help="Don't apply track event filter (default: do)")
-parser.add_argument("--noStable", action='store_true',
+parser.add_argument("--no_stable", action='store_true',
                     help="Don't apply stable beam requirement (default: do)")
 
 parser.add_argument("--unblind", action='store_true',
@@ -220,7 +222,8 @@ print(f"Scintillator Filter  = {args.scintFilt}")
 print(f"Track Filter = {not args.NoTrackFilt}")
 print(f"Blind = {not args.unblind}")
 print(f"OnlyBlinded = {args.onlyblind}")
-print(f"Stable Beams = {not args.noStable}")
+print(f"Stable Beams = {not args.no_stable}")
+print(f"Backward = {args.backward}")
 print(f"GRL = {args.grl}")
 
 # OK, lets run the job here
@@ -273,13 +276,21 @@ if args.genie:
 if args.fluka:
     mc_kwargs['UseFlukaWeights'] = True
 
+# Use backward CKF tracks for the ntuple
+# The default for TrackCollectionWithoutIFT is "CKFTrackCollectionWithoutIFT"
+if args.backward:
+    if args.isMC:
+        mc_kwargs['TrackCollectionWithoutIFT'] = "CKFTrackCollectionBackwardWithoutIFT"
+    else:
+        grl_kwargs['TrackCollectionWithoutIFT'] = "CKFTrackCollectionBackwardWithoutIFT"
+
 # algorithm
 from NtupleDumper.NtupleDumperConfig import NtupleDumperAlgCfg
 if args.isMC:
     acc.merge(NtupleDumperAlgCfg(ConfigFlags, outfile, **mc_kwargs))
 
 else:
-    acc.merge(NtupleDumperAlgCfg(ConfigFlags, outfile, DoBlinding=(not args.unblind), OnlyBlinded=args.onlyblind, DoScintFilter = args.scintFilt, DoTrackFilter = (not args.NoTrackFilt), DoTrigFilter = args.trigFilt, StableOnly = (not args.noStable), **grl_kwargs) )
+    acc.merge(NtupleDumperAlgCfg(ConfigFlags, outfile, DoBlinding=(not args.unblind), OnlyBlinded=args.onlyblind, DoScintFilter = args.scintFilt, DoTrackFilter = (not args.NoTrackFilt), DoTrigFilter = args.trigFilt, StableOnly = (not args.no_stable), **grl_kwargs) )
 
 if not args.verbose:
     from AthenaConfiguration.ComponentFactory import CompFactory
diff --git a/PhysicsAnalysis/NtupleDumper/scripts/submit_faser_ntuple_maker.sh b/PhysicsAnalysis/NtupleDumper/scripts/submit_faser_ntuple_maker.sh
index d50950316dd4528ac4d74a1ad0782c2fa21c1fbe..70b2f7cb67b978da9bf415f5c6513d68bae0ed12 100755
--- a/PhysicsAnalysis/NtupleDumper/scripts/submit_faser_ntuple_maker.sh
+++ b/PhysicsAnalysis/NtupleDumper/scripts/submit_faser_ntuple_maker.sh
@@ -14,6 +14,8 @@
 #   --fluka - create fluka weights
 #   --genie - create genie weights
 #
+# Other --option flags will be passed through to faser_ntuple_maker.py
+#
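+# For example (illustrative), a flag given after the positional arguments
+# is forwarded: submit_faser_ntuple_maker.sh dirpath slice nfiles --backward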
 # dirpath - full directory path to HITS files
 # slice - ordinal output file number
 # nfiles - number of HITS files to process per slice
@@ -32,10 +34,6 @@ SECONDS=0
 ismc=""
 partialstr=""
 mergestr=""
-flukastr=""
-geniestr=""
-unblindstr=""
-stablestr=""
 #
 # Parse command-line options
 while [ -n "$1" ]
@@ -65,22 +63,6 @@ do
 	  shift;
 	  shift;;
 
-      --fluka)
-	  flukastr="--fluka";
-	  shift;;
-
-      --genie)
-	  geniestr="--genie";
-	  shift;;
-
-      --unblind)
-	  unblindstr="--unblind";
-	  shift;;
-
-      --no_stable)
-	  stablestr="--noStable";
-	  shift;;
-
       --) # End of options
 	  shift; # Eat this
 	  break;; # And stop parsing
@@ -94,12 +76,48 @@ do
 done
 #
 # Parse command-line options
-dir_path=${1}
-slice=${2}
-nfiles=${3}
-release_directory=${4}
-working_directory=${5}
-last_file=${6}
+while [ -n "$1" ]
+do
+  case "$1" in
+    --) # Stop parsing
+      shift; # Eat this
+      break;; # And stop parsing
+
+    -*) # Some option
+      break;; # Stop parsing
+
+    *)  # Not an option, fill next value
+
+      if [ -z "$dir_path" ] 
+      then
+	  dir_path=${1}
+	  shift
+      elif [ -z "$slice" ]
+      then
+	  slice=${1}
+	  shift
+      elif [ -z "$nfiles" ] 
+      then
+	  nfiles=${1}
+	  shift
+      elif [ -z "$release_directory" ]
+      then
+	  release_directory=${1}
+	  shift
+      elif [ -z "$working_directory" ]
+      then
+	  working_directory=${1}
+	  shift
+      elif [ -z "$last_file" ]
+      then
+	  last_file=${1}
+	  shift
+      else
+	  break # Already filled everything
+      fi
+      ;;
+  esac
+done
 #
 # Set defaults if arguments aren't provided
 if [ -z "$dir_path" ]
@@ -175,10 +193,12 @@ echo "Output: $output_directory"
 echo "Starting: $starting_directory"
 echo "job: $file_stem"
 echo "Last: $last_file"
+echo "Remaining: $@"
 #
 # Set up the release (do this automatically)?
+# End the source command with -- so this script's command-line arguments are not passed to atlasLocalSetup.sh
 export ATLAS_LOCAL_ROOT_BASE=/cvmfs/atlas.cern.ch/repo/ATLASLocalRootBase
-source ${ATLAS_LOCAL_ROOT_BASE}/user/atlasLocalSetup.sh 
+source ${ATLAS_LOCAL_ROOT_BASE}/user/atlasLocalSetup.sh --
 #
 # Always go back to the starting directory in case paths are relative
 cd "$starting_directory"
@@ -188,7 +208,7 @@ cd "$release_directory"
 asetup --input=calypso/asetup.faser Athena,22.0.49
 source run/setup.sh
 #
-echo "Initial ATLAS_POOLCOND_PATH = $ATLAS_POOLCOND_PATH"
+echo "ATLAS_POOLCOND_PATH = $ATLAS_POOLCOND_PATH"
 #
 # Check if there are data overrides
 if [ -d "run/data/sqlite200" ]; then
@@ -244,26 +264,12 @@ else
     cp -r $cond_directory .
     ls -R data
 fi
-# 
-# Further check if there is a pool conditions override
-if [[ -d "data/poolcond" ]]; then
-    echo "Local POOL directory found!"
-    echo "Change ATLAS_POOLCOND_PATH"
-    echo " from $ATLAS_POOLCOND_PATH"
-    export ATLAS_POOLCOND_PATH=`pwd -P`/data
-    echo " to $ATLAS_POOLCOND_PATH"
-else
-    echo "No local pool files found, use default:"
-    echo " $ATLAS_POOLCOND_PATH"
-fi
-echo "Final ATLAS_POOLCOND_PATH: $ATLAS_POOLCOND_PATH"
-#
 #
 export EOS_MGM_URL=root://eospublic.cern.ch
 #
 # Run job
 #
-faser_ntuple_maker.py $last_file_str $partialstr $tagstr $ismc --slice $slice --files $nfiles $mergestr $flukastr $geniestr $unblindstr $stablestr $dir_path
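+# Any remaining (unparsed) arguments in "$@", e.g. --backward, are forwarded unchanged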
+faser_ntuple_maker.py $last_file_str $partialstr $tagstr $ismc --slice $slice --files $nfiles $mergestr $dir_path "$@"
 ntup_code=$?
 echo "Return code: $ntup_code"
 #