diff --git a/Event/EventPacker/src/component/BufferUnpackers.cpp b/Event/EventPacker/src/component/BufferUnpackers.cpp
index 4ad2731e7cfcdc6147a3ad7caa722d311e7ac9e6..17d2bc27663ffb2ab906c7a9b3c85c18fc2f0557 100644
--- a/Event/EventPacker/src/component/BufferUnpackers.cpp
+++ b/Event/EventPacker/src/component/BufferUnpackers.cpp
@@ -53,6 +53,7 @@ namespace DataPacking::Buffer {
   DECLARE_COMPONENT_WITH_ID( Unpack<LHCb::Particle::Container>, "ParticleUnpacker" )
   DECLARE_COMPONENT_WITH_ID( Unpack<LHCb::Particle::Selection>, "ParticleSelectionUnpacker" )
   DECLARE_COMPONENT_WITH_ID( Unpack<LHCb::Track::Container>, "TrackUnpacker" )
+  DECLARE_COMPONENT_WITH_ID( Unpack<LHCb::Track::Selection>, "TrackSelectionUnpacker" )
   DECLARE_COMPONENT_WITH_ID( Unpack<LHCb::FlavourTag::Container>, "FlavourTagUnpacker" )
   DECLARE_COMPONENT_WITH_ID( Unpack<LHCb::CaloHypo::Container>, "CaloHypoUnpacker" )
   DECLARE_COMPONENT_WITH_ID( Unpack<LHCb::CaloCluster::Container>, "CaloClusterUnpacker" )
@@ -61,6 +62,7 @@ namespace DataPacking::Buffer {
   DECLARE_COMPONENT_WITH_ID( Unpack<LHCb::WeightsVector::Container>, "WeightsVectorUnpacker" )
   DECLARE_COMPONENT_WITH_ID( Unpack<LHCb::RecSummary>, "RecSummaryUnpacker" )
   DECLARE_COMPONENT_WITH_ID( Unpack<LHCb::ProtoParticle::Container>, "ProtoParticleUnpacker" )
+  DECLARE_COMPONENT_WITH_ID( Unpack<LHCb::ProtoParticle::Selection>, "ProtoParticleSelectionUnpacker" )
 
   // SOA unpackers
   DECLARE_COMPONENT_WITH_ID( SOA::Unpack<LHCb::Event::v3::Tracks>, "SOATrackUnpacker" )
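
The string passed to DECLARE_COMPONENT_WITH_ID is the name under which the component is exposed to the Python configuration layer, so once the stack is rebuilt the two new selection unpackers can be imported by exactly those names; this is what the PyConf changes below rely on. Sketch only, assuming a rebuilt LHCb stack exposing the new component IDs:

    # Assumes the new component IDs declared above have been built and registered.
    from PyConf.Algorithms import TrackSelectionUnpacker, ProtoParticleSelectionUnpacker
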
diff --git a/GaudiConf/python/GaudiConf/reading.py b/GaudiConf/python/GaudiConf/reading.py
index 499ae232d8d89c0f77c0483a0471315ca39f36be..a3557ff06cd342580848e733cc432eea851d8d20 100644
--- a/GaudiConf/python/GaudiConf/reading.py
+++ b/GaudiConf/python/GaudiConf/reading.py
@@ -28,7 +28,7 @@ def type_map():
         "ParticlesSelection",
         "SharedObjectsContainer<LHCb::ProtoParticle>":
         "ProtoParticlesSelection",
-        "SharedObjectsContainer<LHCb::Event::v1::Track >":
+        "SharedObjectsContainer<LHCb::Event::v1::Track>":
         "TracksSelection",
         "SharedObjectsContainer<LHCb::RichPID>":
         "RichPIDsSelection",
diff --git a/PyConf/python/PyConf/packing.py b/PyConf/python/PyConf/packing.py
index 301e3c292e5c980743ca81cc9622d68a6cbc8804..aefbd7431a64a8e9c1c46dd90ccd4d42898a0f89 100644
--- a/PyConf/python/PyConf/packing.py
+++ b/PyConf/python/PyConf/packing.py
@@ -108,10 +108,11 @@ def unpackers_map():
 
     from PyConf.Algorithms import (
         RecVertexUnpacker, PrimaryVertexUnpacker, TrackUnpacker,
-        RichPIDUnpacker, MuonPIDUnpacker, GlobalChargedPIDUnpacker,
-        NeutralPIDUnpacker, CaloChargedPIDUnpacker, BremInfoUnpacker,
-        CaloHypoUnpacker, CaloClusterUnpacker, CaloDigitUnpacker,
-        CaloAdcUnpacker, ProtoParticleUnpacker, ParticleUnpacker,
+        TrackSelectionUnpacker, RichPIDUnpacker, MuonPIDUnpacker,
+        GlobalChargedPIDUnpacker, NeutralPIDUnpacker, CaloChargedPIDUnpacker,
+        BremInfoUnpacker, CaloHypoUnpacker, CaloClusterUnpacker,
+        CaloDigitUnpacker, CaloAdcUnpacker, ProtoParticleUnpacker,
+        ProtoParticleSelectionUnpacker, ParticleUnpacker,
         ParticleSelectionUnpacker, VertexUnpacker, FlavourTagUnpacker,
         P2VRelationUnpacker, P2MCPRelationUnpacker, PP2MCPRelationUnpacker,
         P2IntRelationUnpacker, P2InfoRelationUnpacker, RecSummaryUnpacker,
@@ -120,6 +121,7 @@ def unpackers_map():
 
     return {
         "Tracks": TrackUnpacker,
+        "TracksSelection": TrackSelectionUnpacker,
         "RichPIDs": RichPIDUnpacker,
         "CaloChargedPIDs": CaloChargedPIDUnpacker,
         "BremInfos": BremInfoUnpacker,
@@ -133,6 +135,7 @@ def unpackers_map():
         "LightPVs": PrimaryVertexUnpacker,
         "PVs": RecVertexUnpacker,
         "ProtoParticles": ProtoParticleUnpacker,
+        "ProtoParticlesSelection": ProtoParticleSelectionUnpacker,
         "Particles": ParticleUnpacker,
         "ParticlesSelection": ParticleSelectionUnpacker,
         "Vertices": VertexUnpacker,
diff --git a/PyConf/python/PyConf/reading.py b/PyConf/python/PyConf/reading.py
index 98244c1354938b42dae5c89b0f420c9038f5b4c0..f40da1505439f37de5fb3a3dcb9ed4db9b0f68dc 100644
--- a/PyConf/python/PyConf/reading.py
+++ b/PyConf/python/PyConf/reading.py
@@ -22,7 +22,7 @@ import os
 
 
 @configurable
-def tes_root(*, input_process: InputProcessTypes):
+def tes_root(*, input_process: InputProcessTypes = InputProcessTypes.Hlt2):
     """
   Get the ROOT_TES location from the input_process type
   """
@@ -39,7 +39,7 @@ def tes_root(*, input_process: InputProcessTypes):
 
 
 @configurable
-def source_id(*, input_process: InputProcessTypes):
+def source_id(*, input_process: InputProcessTypes = InputProcessTypes.Hlt2):
     """
   Get the ROOT_TES location from the input_process type
   """
@@ -67,7 +67,7 @@ def upfront_reconstruction(
 
     """
 
-    stream = tes_root(input_process)
+    stream = tes_root()
 
     reco_loc = reco_locations(stream)
 
@@ -86,14 +86,12 @@ def upfront_reconstruction(
     ### TODO:FIXME take advantage of the fact that the above have datahandles...
     # i.e. should _not_ have to return decoder here, and should just return the _output handles_ and not the algorithms
     # i.e. `upfront_reconstruction` should be a drop-in replacement for `reconstruction()`, with the same return type
-    return [
-        dstdata_filter(source=source_id(input_process)),
-        upfront_decoder(source=source_id(input_process)).producer
-    ] + mc_algs + unpackers
+    return [dstdata_filter(), upfront_decoder().producer] + mc_algs + unpackers
 
 
 @configurable
-def dstdata_filter(source: HltSourceID, raw_banks=default_raw_banks):
+def dstdata_filter(source: HltSourceID = InputProcessTypes.Hlt2,
+                   raw_banks=default_raw_banks):
     """
     Setting a `RawBankSizeFilter` filter on the DstData bank size.
 
@@ -225,7 +223,7 @@ def get_mc_vertices(location):
 
 @configurable
 def reconstruction(*,
-                   input_process: InputProcessTypes = "Hlt2",
+                   input_process: InputProcessTypes = InputProcessTypes.Hlt2,
                    simulation: bool = False,
                    packable: bool = True):
     stream = tes_root(input_process=input_process)
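
Callers that read something other than Hlt2 output are now expected to re-bind the individual configurables rather than thread a source argument through upfront_reconstruction. A sketch of that pattern, with the assumptions flagged up front: PyConf's usual .bind() context manager is available on these configurables, upfront_reconstruction() takes no required arguments (not shown in this diff), InputProcessTypes is imported from PyConf.reading purely for convenience, and Spruce/"Spruce" are illustrative values only:

    from PyConf.reading import (InputProcessTypes, dstdata_filter, reconstruction,
                                source_id, tes_root, upfront_reconstruction)

    data = reconstruction()  # input_process defaults to InputProcessTypes.Hlt2

    # Override the input process in one place instead of passing it down the chain.
    with tes_root.bind(input_process=InputProcessTypes.Spruce), \
         source_id.bind(input_process=InputProcessTypes.Spruce), \
         dstdata_filter.bind(source="Spruce"):
        spruce_algs = upfront_reconstruction()
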
@@ -382,91 +380,37 @@ def postprocess_unpacked_data(data,
 
     # per track type splitter (to convert 0.0 persistency to latest)
     if persistreco_version() == 0.0:
-        if not packable:
-            # use Selections (SharedObjectsContainers, so pointer remain valid to underlying objects, but not packable YET)
-            # effectively front end only changed in terms of persistency locations
-            from PyConf.Algorithms import TracksSharedSplitterPerType, ProtosSharedSplitterPerTrackType
-            tracks_splitter = TracksSharedSplitterPerType(
-                name='TrackContainerSharedSplitterPerType_{hash}',
-                InputTracks=data['Tracks'])
-            data['LongTracks'] = tracks_splitter.LongTracks
-            data['DownstreamTracks'] = tracks_splitter.DownstreamTracks
-            data['UpstreamTracks'] = tracks_splitter.UpstreamTracks
-            data['Ttracks'] = tracks_splitter.Ttracks
-            data['VeloTracks'] = tracks_splitter.VeloTracks
-            # for protoparticles
-            protos_splitter = ProtosSharedSplitterPerTrackType(
-                name='ChargedProtoParticleSharedSplitterPerType_{hash}',
-                InputProtos=data['ChargedProtos'])
-            data['LongProtos'] = protos_splitter.LongProtos
-            data['DownstreamProtos'] = protos_splitter.LongProtos
-            data['UpstreamProtos'] = protos_splitter.LongProtos
-        else:
-            # use copies (as SharedObjectsContainer is not packable yet)
-            # this can lead to weird/unwanted duplication if one will persist these objects subsequently (e.g. in a Sprucing step)
-            print(
-                "NB: objects will be copied to latest 'persistreco_version'"\
-                ", this might lead to unwanted duplication in a subsequent packing step"
-            )
-            # FIXME when this is possible, move only to the SharedObjectsContainers and set these to their persistable locations
-            from PyConf.Algorithms import ChargedProtoParticleFilteredCopyAlg, TrackContainerFilteredCopy
-            import Functors as F
-            split_containers = {}
-            track_predicates = {
-                'Long': F.TRACKISLONG,
-                'Downstream': F.TRACKISDOWNSTREAM,
-                'Upstream': F.TRACKISUPSTREAM
-            }
-            for track_type, track_pred in track_predicates.items():
-                split_containers[
-                    track_type] = ChargedProtoParticleFilteredCopyAlg(
-                        name=f'ChargedProtoParticleFilteredCopyAlg_{track_type}'
-                        + '_{hash}',
-                        InputProtos=data['ChargedProtos'],
-                        TrackPredicate=track_pred,
-                        outputs={
-                            'OutputProtos':
-                            persistable_location(f'{track_type}Protos'),
-                            'OutputTracks':
-                            persistable_location(f'{track_type}Tracks'),
-                            'OutputRichPIDs':
-                            None,
-                            'OutputMuonPIDs':
-                            None,
-                            'OutputMuonTracks':
-                            None,
-                            'OutputBremInfos':
-                            None,
-                            'OutputCaloChargedPIDs':
-                            None,
-                            'OutputGlobalChargedPIDs':
-                            None
-                        })
-                data[f'{track_type}Protos'] = split_containers[
-                    track_type].OutputProtos
-                data[f'{track_type}Tracks'] = split_containers[
-                    track_type].OutputTracks
-                data[f'{track_type}RichPIDs'] = split_containers[
-                    track_type].OutputRichPIDs
-                data[f'{track_type}MuonPIDs'] = split_containers[
-                    track_type].OutputMuonPIDs
-                data[f'{track_type}MuonTracks'] = split_containers[
-                    track_type].OutputMuonTracks
-            # separate handling of T-tracks and Velo tracks
-            data['Ttracks'] = TrackContainerFilteredCopy(
-                name='TrackContainerFilteredCopy_Ttracks_{hash}',
-                Inputs=[data['Tracks']],
-                Selection=F.TRACKISTTRACK,
-                outputs={
-                    'Output': persistable_location('Ttracks')
-                }).Output
-            data['VeloTracks'] = TrackContainerFilteredCopy(
-                name='TrackContainerFilteredCopy_VeloTracks_{hash}',
-                Inputs=[data['Tracks']],
-                Selection=F.TRACKISVELO,
-                outputs={
-                    'Output': persistable_location('VeloTracks')
-                }).Output
+        # use Selections (SharedObjectsContainers, so pointers remain valid to the underlying objects, but not packable YET)
+        # effectively, only the persistency locations change from the front-end point of view
+        from PyConf.Algorithms import TracksSharedSplitterPerType, ProtosSharedSplitterPerTrackType
+        tracks_splitter = TracksSharedSplitterPerType(
+            name='TrackContainerSharedSplitterPerType_{hash}',
+            InputTracks=data['Tracks'],
+            outputs={
+                "LongTracks": persistable_location('LongTracks'),
+                "DownstreamTracks": persistable_location('DownstreamTracks'),
+                "UpstreamTracks": persistable_location('UpstreamTracks'),
+                "Ttracks": persistable_location('Ttracks'),
+                "VeloTracks": persistable_location('VeloTracks')
+            })
+        data['LongTracks'] = tracks_splitter.LongTracks
+        data['DownstreamTracks'] = tracks_splitter.DownstreamTracks
+        data['UpstreamTracks'] = tracks_splitter.UpstreamTracks
+        data['Ttracks'] = tracks_splitter.Ttracks
+        data['VeloTracks'] = tracks_splitter.VeloTracks
+        # for protoparticles
+        protos_splitter = ProtosSharedSplitterPerTrackType(
+            name='ChargedProtoParticleSharedSplitterPerType_{hash}',
+            InputProtos=data['ChargedProtos'],
+            outputs={
+                "LongProtos": persistable_location('LongProtos'),
+                "DownstreamProtos": persistable_location('DownstreamProtos'),
+                "UpstreamProtos": persistable_location('UpstreamProtos'),
+                "TtrackProtos": None,
+            })
+        data['LongProtos'] = protos_splitter.LongProtos
+        data['DownstreamProtos'] = protos_splitter.DownstreamProtos
+        data['UpstreamProtos'] = protos_splitter.UpstreamProtos
 
     ### Temporary: This to be compatible with data where the RecSummary does not exist.
     if "RecSummary" not in data.keys():