diff --git a/Bp2JpsiPLambdabar/dv_data.py b/Bp2JpsiPLambdabar/dv_data.py
new file mode 100644
index 0000000000000000000000000000000000000000..2effd6694fba9ce8ef79c4791889c6c4bd32ec61
--- /dev/null
+++ b/Bp2JpsiPLambdabar/dv_data.py
@@ -0,0 +1,15 @@
+from .template import *
+
+def Bp2JpsipLambdabar_LL():
+    # B+ -> J/psi(1S) p+ Lambda~0 with long-long (LL) Lambda candidates
+    algs = template(["B+", "J/psi(1S)", "p+", "Lambda~0"], True, "Bp2JpsipLambdabar_LL")
+    return algs
+
+def Bp2JpsipLambdabar_DD():
+    # same decay with downstream-downstream (DD) Lambda candidates
+    algs = template(["B+", "J/psi(1S)", "p+", "Lambda~0_DD"], True, "Bp2JpsipLambdabar_DD")
+    return algs
+
+def entry_point(options: Options):
+    # the two dicts use distinct keys (f"{directory}_tuple"), so update()
+    # merges the LL and DD tuples into a single configuration
+    algs_merged = Bp2JpsipLambdabar_LL()
+    algs_merged.update(Bp2JpsipLambdabar_DD())
+
+    return make_config(options, algs_merged)
diff --git a/Bp2JpsiPLambdabar/info.yaml b/Bp2JpsiPLambdabar/info.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..e65dd5233304e2cfedb4e71a5153ab4e7afc68d5
--- /dev/null
+++ b/Bp2JpsiPLambdabar/info.yaml
@@ -0,0 +1,45 @@
+defaults:
+    application: DaVinci/v64r9
+    wg: BandQ
+    inform:
+        - chenlong.wang@cern.ch
+        - zan.ren@cern.ch
+        - j.fu@cern.ch
+
+# data configuration
+{%- set datasets = [
+  ('Collision24', '24c2', 'Sprucing24c2/90000000', 'BANDQ', 'MagUp', 'VeloClosed', 'Spruce', 'bandq'),
+  ('Collision24', '24c2', 'Sprucing24c2/90000000', 'BANDQ', 'MagDown', 'VeloClosed', 'Spruce', 'bandq'),
+  ('Collision24', '24c3', 'Sprucing24c3/90000000', 'BANDQ', 'MagUp', 'VeloClosed', 'Spruce', 'bandq'),
+  ('Collision24', '24c3', 'Sprucing24c3/90000000', 'BANDQ', 'MagDown', 'VeloClosed', 'Spruce', 'bandq'),
+  ('Collision24', '24c4', 'Sprucing24c4/90000000', 'BANDQ', 'MagUp', 'VeloClosed', 'Spruce', 'bandq'),
+  ('Collision24', '24c4', 'Sprucing24c4/90000000', 'BANDQ', 'MagDown', 'VeloClosed', 'Spruce', 'bandq'),
+]%}
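+
+# Each dataset tuple above renders one job below; e.g. the first tuple becomes
+# job "data_BANDQ_Spruce_Collision24_VeloClosed_MagUp_24c2", reading the
+# Sprucing24c2 BANDQ stream.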
+
+
+{%- set dv_platform = "x86_64_v2-el9-clang16-opt" %}
+
+{%- for data, SprucingTag, Type, wg, polarity, Velo, process, stream in datasets %}
+data_{{wg}}_{{process}}_{{data}}_{{Velo}}_{{polarity}}_{{SprucingTag}}:
+    application: "DaVinci/v64r9@{{dv_platform}}"
+    input:
+      bk_query: "/LHCb/{{data}}/Beam6800GeV-{{Velo}}-{{polarity}}/Real Data/{{Type}}/{{wg}}.DST"
+      dq_flags:
+        - OK
+        - UNCHECKED
+      keep_running: True
+      n_test_lfns: 1
+    options:
+      entrypoint: Bp2JpsiPLambdabar.dv_data:entry_point
+      extra_options:
+        input_type: ROOT
+        input_process: "{{process}}"
+        input_stream: "{{stream}}"
+        input_raw_format: 0.5
+        simulation: False
+        data_type: "Upgrade"
+        geometry_version: run3/2024.Q1.2-v00.00
+        conditions_version: master
+    output: Data_{{process}}_{{stream}}_{{SprucingTag}}.ROOT
+{%- endfor %}
diff --git a/Bp2JpsiPLambdabar/template.py b/Bp2JpsiPLambdabar/template.py
new file mode 100644
index 0000000000000000000000000000000000000000..9044186817b09439ccce6c7aacee1dc25655a524
--- /dev/null
+++ b/Bp2JpsiPLambdabar/template.py
@@ -0,0 +1,191 @@
+from .tools.tupling_maker import *
+from .tools.descriptor_writer import *
+
+from GaudiKernel.SystemOfUnits import GeV, MeV, mm, meter, picosecond
+
+from RecoConf.reconstruction_objects import make_pvs as _make_pvs
+
+from Hlt2Conf.standard_particles import (
+    make_long_kaons, make_long_protons, make_long_pions, make_LambdaLL,
+    make_long_muons, make_phi2kk, make_down_pions, make_down_protons,
+    make_LambdaDD, make_KsDD, make_KsLL, _make_particles, _make_V0LL,
+    _make_V0DD, standard_protoparticle_filter)
+from Hlt2Conf.algorithms_thor import ParticleFilter
+
+import Functors as F
+
+masses = {
+    # PDG, P.A. Zyla et al. (Particle Data Group),
+    # Prog. Theor. Exp. Phys. 2020, 083C01 (2020) and 2021 update
+    'pi0': 134.9768 * MeV,        # +/- 0.0005
+    'J/psi(1S)': 3096.900 * MeV,  # +/- 0.006
+    # PDG, PR D98, 030001 and 2019 update
+    'KS0': 497.611 * MeV,         # +/- 0.013
+    'Lambda0': 1115.683 * MeV,    # +/- 0.006
+}
+
+def _make_long_for_V0_loose(particles, pvs):
+    code = F.require_all(F.MINIPCHI2(pvs) > 9)
+    return ParticleFilter(particles, F.FILTER(code))
+
+def _make_down_for_V0_loose(particles):
+    # effectively a pass-through; zero thresholds kept as tuning placeholders
+    code = F.require_all(F.P > 0 * MeV, F.PT > 0 * MeV)
+    return ParticleFilter(particles, F.FILTER(code))
+
+def make_long_pions_for_V0_loose():
+    return _make_long_for_V0_loose(make_long_pions(), get_pvs())
+
+def make_long_protons_for_V0_loose():
+    return _make_long_for_V0_loose(make_long_protons(), get_pvs())
+
+def make_down_pions_for_V0_loose():
+    return _make_down_for_V0_loose(make_down_pions())
+
+def make_down_protons_for_V0_loose():
+    return _make_down_for_V0_loose(make_down_protons())
+
+def make_KsDD_loose(pions_down=None):
+    if pions_down is None:
+        pions_down = make_down_pions_for_V0_loose()
+    descriptors = "KS0 -> pi+ pi-"
+    return _make_V0DD(
+        particles=[pions_down, pions_down],
+        descriptors=descriptors,
+        pv_maker=_make_pvs,
+        name='std_make_V0DD_loose_{hash}',
+        bpvvdz_min=100 * mm)
+
+def make_LambdaDD_loose():
+    pions = make_down_pions_for_V0_loose()
+    protons = make_down_protons_for_V0_loose()
+    descriptors = "[Lambda0 -> p+ pi-]cc"
+    return _make_V0DD(
+        particles=[protons, pions],
+        descriptors=descriptors,
+        pv_maker=_make_pvs,
+        name='std_make_V0DD_loose_{hash}',
+        nominal_mass=masses['Lambda0'],
+        am_dmass=80 * MeV,
+        m_dmass=24 * MeV,
+        vchi2pdof_max=30,
+        bpvvdz_min=100 * mm)
+
+def make_LambdaLL_loose():
+    pions = make_long_pions_for_V0_loose()
+    protons = make_long_protons_for_V0_loose()
+    descriptors = "[Lambda0 -> p+ pi-]cc"
+    return _make_V0LL(
+        particles=[protons, pions],
+        descriptors=descriptors,
+        pv_maker=_make_pvs,
+        nominal_mass=masses['Lambda0'],
+        am_dmass=50 * MeV,
+        m_dmass=20 * MeV,
+        vchi2pdof_max=30,
+        bpvltime_min=2.0 * picosecond)
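+
+# The LL maker keeps long-track Lambda candidates with a proper-time cut; the
+# DD maker keeps downstream-track candidates and uses a flight-distance
+# (BPVVDZ) requirement instead, with a correspondingly wider mass window.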
+
+def template(particles, cc, directory):
+
+    ##### Default inputs
+    sprucing_line = 'SpruceBandQ_JpsiToMuMuDetached'
+    Jpsi = get_particles(f"/Event/Spruce/{sprucing_line}/Particles")
+
+    rec_sum = get_rec_summary()
+    v2_pvs = get_pvs()
+    odin = get_odin()
+    decreports = None
+
+    long_pions = make_long_pions()
+    long_kaons = make_long_kaons()
+    long_protons = make_long_protons()
+
+    # track-level selections, translated to ThOr functors via convert_cut()
+    DaughterCutp = "(GHOSTPROB<0.5) & (PROBNN_P>0.1)"
+    DaughterCutpi = "(BPVIPCHI2(get_pvs())>9) & (PT>250*MeV) & (PID_K<5) & (GHOSTPROB<0.5)"
+    DaughterCutK = "(BPVIPCHI2(get_pvs())>9) & (PT>250*MeV) & (PID_K>5) & (GHOSTPROB<0.5)"
+
+    protons = ParticleFilter(long_protons, Cut=F.FILTER(convert_cut(DaughterCutp)))
+    kaons = ParticleFilter(long_kaons, Cut=F.FILTER(convert_cut(DaughterCutK)))
+    pions = ParticleFilter(long_pions, Cut=F.FILTER(convert_cut(DaughterCutpi)))
+    muons = make_long_muons()
+    Lambda = make_LambdaLL_loose()
+    Lambda_DD = make_LambdaDD_loose()
+    KS0 = make_KsLL()
+    KS0_DD = make_KsDD_loose()
+    phi = make_phi2kk(kaon_pidk_min=5)
+
+    input_dict = {"J/psi(1S)": Jpsi, "phi(1020)": phi,
+                  "Lambda0":Lambda, "Lambda~0":Lambda,
+                  "pi+": pions, "pi-": pions,
+                  "K+": kaons, "K-": kaons,
+                  "p+": protons, "p~-": protons,
+                  "mu+": muons, "mu~-": muons,
+                  "Lambda0_DD":Lambda_DD, "Lambda~0_DD":Lambda_DD,
+                  "KS0":KS0, "KS0_DD":KS0_DD,
+                 }
+
+    # collect input containers for the daughters (the first entry is the mother)
+    data_list = [input_dict[particle] for particle in particles[1:]]
+
+    # strip the "_DD" suffix so descriptors and branch names use PDG names
+    for i in range(len(particles)):
+        if particles[i][-3:] == "_DD":
+            particles[i] = particles[i][:-3]
+
+    B2JpsiX = ParticleCombiner(
+            Inputs = data_list,
+            DecayDescriptor = descriptor(particles, cc),
+            name = "JpsiToMuMu_Detached_line_validation_{hash}",
+            CombinationCut = convert_cut("in_range(5000, MASS, 5600)"),
+            #CombinationCut = convert_cut("in_range(5200, MASS, 7000)"),
+            CompositeCut = convert_cut("in_range(5000, MASS, 5600) & BPVLTIME(get_pvs())>0.2*picosecond & BPVVDZ(get_pvs())>0.*mm & BPVDIRA(get_pvs())>0.99 & BPVIPCHI2(get_pvs())<25 & CHI2DOF<25")
+    )
+
+    from pprint import pprint
+    pprint(descriptor(particles, cc))
+    #FunTuple: define branches.
+
+    hlt1_trigger_lines = [ 'Hlt1TrackMVADecision',
+                           'Hlt1TwoTrackMVADecision',
+                           'Hlt1D2KKDecision',
+                           'Hlt1D2KPiDecision', 
+                           'Hlt1D2PiPiDecision',
+                           'Hlt1DiMuonHighMassDecision', 
+                           'Hlt1DiMuonLowMassDecision',
+                           'Hlt1DiMuonSoftDecision',
+                           'Hlt1KsToPiPiDecision',
+                           'Hlt1LowPtMuonDecision',
+                           'Hlt1LowPtDiMuonDecision',
+                           'Hlt1SingleHighPtMuonDecision',
+                           'Hlt1TrackMuonMVADecision']
+
+
+    line_prefilter = create_lines_filter(name=f"PreFilter_{sprucing_line}_{{hash}}", lines=[sprucing_line])
+    evt_vars = event_variables(v2_pvs, odin, decreports, rec_sum, hlt1_trigger_lines, [sprucing_line])
+    candidate_vars = candidate_variables(v2_pvs, particles)
+    tistos_vars = tistos_variables(hlt1_trigger_lines, ["Hlt2Topo2BodyDecision", "Hlt2Topo3BodyDecision", "Hlt2_JpsiToMuMuDetachedFullDecision", "Hlt2_Psi2SToMuMuDetachedFullDecision"], Jpsi, False)
+
+    composite_particles = [ particle for particle in particles if (particle_df["type"][particle] != "basic") and (particle != "phi(1020)") ]
+    # PV-constrained DTF: dtf_vars1 also mass-constrains the head (B+),
+    # dtf_vars2 constrains only the composite daughters
+    dtf_vars1 = make_dtf_variables(v2_pvs, B2JpsiX, particles, True, composite_particles, directory)
+    dtf_vars2 = make_dtf_variables(v2_pvs, B2JpsiX, particles, True, composite_particles[1:], directory)
+
+    # attach TisTos information to the composite/toplevel branches
+    for particle in particles:
+        if particle_df["type"][particle] != "basic":
+            candidate_vars[particle_df["abbr"][particle]] += tistos_vars
+
+    for key in candidate_vars.keys():
+        candidate_vars[key] += dtf_vars1[key]
+        candidate_vars[key] += dtf_vars2[key]
+
+    B2JpsiX_branches = default_branches(particles, cc)
+    pprint(B2JpsiX_branches)
+
+    # define tupling algorithms
+    B2JpsiX_tuple = FunTuple_Particles( name=directory,
+                                         inputs=B2JpsiX,
+                                         tuple_name="DecayTree",
+                                         fields=B2JpsiX_branches,
+                                         variables=candidate_vars,
+                                         store_multiple_cand_info = True,
+                                         event_variables = evt_vars)
+
+    algs = { f"{directory}_tuple":[ B2JpsiX_tuple, line_prefilter ],}
+            
+    return algs
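+
+# template() returns e.g. {"Bp2JpsipLambdabar_LL_tuple": [<prefilter>, <tuple>]};
+# dv_data.entry_point merges the LL and DD dicts and passes them to make_config()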
diff --git a/Bp2JpsiPLambdabar/tools/descriptor_writer.py b/Bp2JpsiPLambdabar/tools/descriptor_writer.py
new file mode 100644
index 0000000000000000000000000000000000000000..cfbd2acc901bf5402cc72bd489598872d31e8e80
--- /dev/null
+++ b/Bp2JpsiPLambdabar/tools/descriptor_writer.py
@@ -0,0 +1,153 @@
+import pandas as pd
+
+# name abbreviations, particle types and sub-decay daughters used throughout
+particle_dict = {"Lambda_b0": {"abbr": "Lb", "type": "toplevel", "daughters":[]},
+                 "Xi_b-":     {"abbr": "Xibm", "type": "toplevel", "daughters":[]},
+                 "Xi_b0":     {"abbr": "Xib0", "type": "toplevel", "daughters":[]},
+                 "B0":        {"abbr": "B0", "type": "toplevel", "daughters":[]},
+                 "B+":        {"abbr": "Bp", "type": "toplevel", "daughters":[]},
+                 "B-":        {"abbr": "Bm", "type": "toplevel", "daughters":[]},
+                 "B_s0":      {"abbr": "Bs", "type": "toplevel", "daughters":[]},
+                 "J/psi(1S)": {"abbr": "Jpsi", "type": "composite", "daughters": ["mu+", "mu-"]},
+                 "psi(2S)":   {"abbr": "psi2S", "type": "composite", "daughters": ["mu+", "mu-"]},
+                 "phi(1020)": {"abbr": "phi", "type": "composite", "daughters": ["K+", "K-"]},
+                 "Lambda0":   {"abbr": "Lambda", "type": "composite", "daughters": ["p+", "pi-"]},
+                 "Lambda~0":  {"abbr": "Lambda", "type": "composite", "daughters": ["p~-", "pi+"]},
+                 "KS0":       {"abbr": "KS0", "type": "composite", "daughters": ["pi+", "pi-"]},
+                 "mu+":       {"abbr": "mup", "type": "basic", "daughters":[]},
+                 "mu-":       {"abbr": "mum", "type": "basic", "daughters":[]},
+                 "e+":        {"abbr": "ep", "type": "basic", "daughters":[]},
+                 "e-":        {"abbr": "em", "type": "basic", "daughters":[]},
+                 "pi+":       {"abbr": "Pip", "type": "basic", "daughters":[]},
+                 "pi-":       {"abbr": "Pim", "type": "basic", "daughters":[]},
+                 "K+":        {"abbr": "Kp", "type": "basic", "daughters":[]},
+                 "K-":        {"abbr": "Km", "type": "basic", "daughters":[]},
+                 "p+":        {"abbr": "Pp", "type": "basic", "daughters":[]},
+                 "p~-":       {"abbr": "Pm", "type": "basic", "daughters":[]},
+            }
+particle_df = pd.DataFrame(particle_dict).T
+
+def descriptor(particles, cc=False):
+    decay_descriptor = f"{particles[0]} -> " + " ".join(particles[1:])
+    if cc:
+        decay_descriptor = f"[{decay_descriptor}]cc"
+    return decay_descriptor
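+
+# Example: descriptor(["B+", "J/psi(1S)", "p+", "Lambda~0"], cc=True)
+# returns "[B+ -> J/psi(1S) p+ Lambda~0]cc"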
+
+def decay_branches(mother, daughters, decay_descriptor):
+    psi_pos = 0
+    phi_pos = 0
+    Lambda_pos = 0
+    KS0_pos = 0
+    Pm_pos = Pp_pos = 0
+    Km_pos = Kp_pos = 0
+    Pim_pos = Pip_pos = 0
+    mum_pos = mup_pos = 0
+    em_pos = ep_pos = 0
+
+    #if decay_descriptor[0] == "[" or decay_descriptor[-3:] == "]cc":
+    if "cc" in decay_descriptor[-5:]:
+        branch_descriptor = decay_descriptor[:-2] + "CC"
+    else: branch_descriptor = decay_descriptor
+    #if "J/psi(1S)" in decay_descriptor: 
+    #    branch_descriptor = branch_descriptor.replace("J/psi(1S)", "(J/psi(1S) -> mu+ mu-)")
+    #if "psi(2S)" in decay_descriptor: 
+    #    branch_descriptor = branch_descriptor.replace("psi(2S)", "(psi(2S) -> mu+ mu-)")
+    #if "Lambda0" in decay_descriptor: 
+    #    branch_descriptor = branch_descriptor.replace("Lambda0", "(Lambda0 -> p+ pi-)")
+    #if "Lambda~0" in decay_descriptor: 
+    #    branch_descriptor = branch_descriptor.replace("Lambda~0", "(Lambda~0 -> p~- pi+)")
+    for comp_par in particle_df.query("type=='composite'").index:
+        if comp_par in decay_descriptor:
+            #branch_descriptor = branch_descriptor.replace(comp_par, comp_par + "->" + " ".join(particle_df["daughters"][comp_par]))
+            branch_descriptor = branch_descriptor.replace(comp_par, f'({comp_par} -> {" ".join(particle_df["daughters"][comp_par])})')
+
+    branches = {mother: branch_descriptor}
+
+    # NOTE: the substring tests below can fire for more than one rule (e.g.
+    # "mup_Jpsi" matches both "psi" and "mup"); the later, more specific rule
+    # overwrites the earlier entry, so the ordering of these blocks matters
+    for daughter in daughters:
+        if "psi" in daughter:
+            true_pos = branch_descriptor.rfind("(", 0, branch_descriptor.find("psi", psi_pos))
+            branches.update({daughter: branch_descriptor[:true_pos]+"^"+branch_descriptor[true_pos:]})
+            psi_pos = branch_descriptor.find("psi", psi_pos) + len("psi(nS)")
+        if "phi" in daughter:
+            true_pos = branch_descriptor.rfind("(", 0, branch_descriptor.find("phi", phi_pos))
+            branches.update({daughter: branch_descriptor[:true_pos]+"^"+branch_descriptor[true_pos:]})
+            phi_pos = branch_descriptor.find("phi", phi_pos) + len("phi(1020)")
+        if "Lambda" in daughter:
+            true_pos = branch_descriptor.rfind("(", 0, branch_descriptor.find("Lambda", Lambda_pos))
+            branches.update({daughter: branch_descriptor[:true_pos]+"^"+branch_descriptor[true_pos:]})
+            Lambda_pos = branch_descriptor.find("Lambda", Lambda_pos) + len("Lambda~0")
+        if "KS0" in daughter:
+            true_pos = branch_descriptor.rfind("(", 0, branch_descriptor.find("KS0", KS0_pos))
+            branches.update({daughter: branch_descriptor[:true_pos]+"^"+branch_descriptor[true_pos:]})
+            KS0_pos = branch_descriptor.find("KS0", KS0_pos) + len("KS0")
+        if "Pp" in daughter:
+            true_pos = branch_descriptor.find("p+", Pp_pos)
+            branches.update({daughter: branch_descriptor[:true_pos]+"^"+branch_descriptor[true_pos:]})
+            Pp_pos = true_pos + len("p+")
+        if "Pm" in daughter:
+            true_pos = branch_descriptor.find("p~-", Pm_pos)
+            branches.update({daughter: branch_descriptor[:true_pos]+"^"+branch_descriptor[true_pos:]})
+            Pm_pos = true_pos + len("p~-")
+        if "Kp" in daughter:
+            true_pos = branch_descriptor.find("K+", Kp_pos)
+            branches.update({daughter: branch_descriptor[:true_pos]+"^"+branch_descriptor[true_pos:]})
+            Kp_pos = true_pos + len("K+")
+        if "Km" in daughter:
+            true_pos = branch_descriptor.find("K-", Km_pos)
+            branches.update({daughter: branch_descriptor[:true_pos]+"^"+branch_descriptor[true_pos:]})
+            Km_pos = true_pos + len("K-")
+        if "Pip" in daughter or "Hp" in daughter:
+            true_pos = branch_descriptor.find("pi+", Pip_pos)
+            branches.update({daughter: branch_descriptor[:true_pos]+"^"+branch_descriptor[true_pos:]})
+            Pip_pos = true_pos + len("pi+")
+        if "Pim" in daughter or "Hm" in daughter:
+            true_pos = branch_descriptor.find("pi-", Pim_pos)
+            branches.update({daughter: branch_descriptor[:true_pos]+"^"+branch_descriptor[true_pos:]})
+            Pim_pos = true_pos + len("pi-")
+        if "mup" in daughter:
+            true_pos = branch_descriptor.find("mu+", mup_pos)
+            branches.update({daughter: branch_descriptor[:true_pos]+"^"+branch_descriptor[true_pos:]})
+            mup_pos = true_pos + len("mu+")
+        if "mum" in daughter:
+            true_pos = branch_descriptor.find("mu-", mum_pos)
+            branches.update({daughter: branch_descriptor[:true_pos]+"^"+branch_descriptor[true_pos:]})
+            mum_pos = true_pos + len("mu-")
+        if "ep" in daughter:
+            true_pos = branch_descriptor.find("e+", ep_pos)
+            branches.update({daughter: branch_descriptor[:true_pos]+"^"+branch_descriptor[true_pos:]})
+            ep_pos = true_pos + len("e+")
+        if "em" in daughter:
+            true_pos = branch_descriptor.find("e-", em_pos)
+            branches.update({daughter: branch_descriptor[:true_pos]+"^"+branch_descriptor[true_pos:]})
+            em_pos = true_pos + len("e-")
+    return branches
+
+def all_particles(particles):
+    all_pars = []
+    for particle in particles:
+        all_pars += ( [ particle ] + particle_df["daughters"][particle])
+    return all_pars
+
+#print(all_particles(["B_s0","J/psi(1S)", "p+", "p~-"]))
+
+def default_names(particles):
+    names = []
+    for particle in particles:
+        abbr = particle_df["abbr"][ particle ]
+        names.append( abbr )
+        names += [f"{daughter_abbr}_{abbr}" for daughter_abbr in particle_df["abbr"][particle_df["daughters"][particle]]]
+    return names
+
+def default_branches(particles, cc=False):
+    names = default_names(particles)
+    return decay_branches(names[0], names[1:], descriptor(particles, cc))
+
+# Example (illustrative):
+#   default_branches(["B+", "J/psi(1S)", "KS0", "pi+"], True) maps "Jpsi" to
+#   "[B+ -> ^(J/psi(1S) -> mu+ mu-) (KS0 -> pi+ pi-) pi+]CC" and likewise
+#   marks every other branch ("Bp", "mup_Jpsi", ..., "Pip") with "^"
diff --git a/Bp2JpsiPLambdabar/tools/tupling_maker.py b/Bp2JpsiPLambdabar/tools/tupling_maker.py
new file mode 100644
index 0000000000000000000000000000000000000000..f118601c7be2fcf78a9604da84c7c7654d1211ee
--- /dev/null
+++ b/Bp2JpsiPLambdabar/tools/tupling_maker.py
@@ -0,0 +1,389 @@
+from DaVinci import make_config, Options
+from DaVinci.algorithms import create_lines_filter
+
+from PyConf.reading import get_rec_summary, get_pvs
+from PyConf.reading import get_particles
+
+import FunTuple.functorcollections as FC
+import Functors as F
+from FunTuple import FunctorCollection
+
+from FunTuple import FunTuple_Event
+from FunTuple import FunTuple_Particles
+from .descriptor_writer import *
+
+
+
+from Functors.math import in_range
+from Hlt2Conf.algorithms_thor import ParticleCombiner, ParticleFilter
+
+from RecoConf.event_filters import require_pvs
+
+from Hlt2Conf.standard_particles import get_long_track_selector, standard_protoparticle_filter
+from PyConf.Algorithms import FunctionalParticleMaker
+from PyConf.reading import get_charged_protoparticles as _make_charged_protoparticles 
+from PyConf.reading import get_odin
+
+from GaudiKernel.SystemOfUnits import MeV, picosecond, mm, GeV
+from DecayTreeFitter import DecayTreeFitter
+
+_basic = "basic"
+_composite = "composite"
+_toplevel = "toplevel"
+
+def all_variables(pvs, mctruth, ptype, candidates=None, ftAlg=None):
+
+    if ptype not in [_basic, _composite, _toplevel]:
+        raise ValueError(f"Expected {_basic}, {_composite} or {_toplevel}, got {ptype}")
+    all_vars = FunctorCollection({})
+
+    comp = _composite == ptype or _toplevel == ptype  # is composite
+    basic = _basic == ptype  # is not composite
+    top = _toplevel == ptype  # the B
+
+    all_vars += FC.Kinematics()
+    if basic:
+        all_vars += FC.ParticleID(extra_info=True)
+
+    if comp:
+        all_vars.update({"ALV": F.ALV(Child1=1, Child2=2)})
+
+    if comp:  # all these require a vertex
+        all_vars.update({"BPVCORRM": F.BPVCORRM(pvs)})
+        all_vars.update({"BPVCORRMERR": F.BPVCORRMERR(pvs)})
+        all_vars.update({"BPVDIRA": F.BPVDIRA(pvs)})
+        all_vars.update({"BPVDLS": F.BPVDLS(pvs)})
+        all_vars.update({"BPVETA": F.BPVETA(pvs)})
+        all_vars.update({"BPVFD": F.BPVFD(pvs)})
+        all_vars.update({"BPVFDCHI2": F.BPVFDCHI2(pvs)})
+        all_vars.update({"BPVFDIR": F.BPVFDIR(pvs)})
+        all_vars.update({"BPVFDVEC": F.BPVFDVEC(pvs)})
+
+    all_vars.update({"BPVIP": F.BPVIP(pvs)})
+    all_vars.update({"BPVIPCHI2": F.BPVIPCHI2(pvs)})
+    all_vars.update({"BPVX": F.BPVX(pvs)})
+    all_vars.update({"BPVY": F.BPVY(pvs)})
+    all_vars.update({"BPVZ": F.BPVZ(pvs)})
+    all_vars.update({"ALLPVX": F.ALLPVX(pvs)})
+    all_vars.update({"ALLPVY": F.ALLPVY(pvs)})
+    all_vars.update({"ALLPVZ": F.ALLPVZ(pvs)})
+
+    if comp:  # all these require a vertex
+        all_vars.update({"ALLPV_FD": F.ALLPV_FD(pvs)})
+        all_vars.update({"ALLPV_IP": F.ALLPV_IP(pvs)})
+        all_vars.update({"BPVLTIME": F.BPVLTIME(pvs)})
+        all_vars.update({"BPVVDRHO": F.BPVVDRHO(pvs)})
+        all_vars.update({"BPVVDX": F.BPVVDX(pvs)})
+        all_vars.update({"BPVVDY": F.BPVVDY(pvs)})
+        all_vars.update({"BPVVDZ": F.BPVVDZ(pvs)})
+
+    all_vars.update({"CHARGE": F.CHARGE})
+    all_vars.update({"CHI2": F.CHI2})
+    all_vars.update({"CHI2DOF": F.CHI2DOF})
+    #if top:  # apply this only to B
+    #    all_vars.update({"CHILD1_PT": F.CHILD(1, F.PT)})  # example of CHILD
+    #    all_vars.update({"Ds_END_VZ": F.CHILD(1, F.END_VZ)})
+    #    all_vars.update({"Delta_END_VZ_DsB0": F.CHILD(1, F.END_VZ) - F.END_VZ})
+
+    if comp:
+        #all_vars.update({"DOCA": F.SDOCA(Child1=1, Child2=2)})
+        #all_vars.update({"DOCACHI2": F.SDOCACHI2(Child1=1, Child2=2)})
+        all_vars.update({"END_VRHO": F.END_VRHO})
+        all_vars.update({"END_VX": F.END_VX})
+        all_vars.update({"END_VY": F.END_VY})
+        all_vars.update({"END_VZ": F.END_VZ})
+
+    # duplicated from FC   all_vars.update({"ENERGY" : F.ENERGY})
+    all_vars.update({"ETA": F.ETA})
+    all_vars.update({"FOURMOMENTUM": F.FOURMOMENTUM})
+    all_vars.update({"ISBASIC": F.ISBASICPARTICLE})
+
+    if basic:
+        all_vars.update({"GHOSTPROB": F.GHOSTPROB})
+        all_vars.update({"ISMUON": F.ISMUON})
+        all_vars.update({"INMUON": F.INMUON})
+        all_vars.update({"INECAL": F.INECAL})
+        all_vars.update({"INHCAL": F.INHCAL})
+        all_vars.update({"HASBREM": F.HASBREM})
+        all_vars.update({"BREMENERGY": F.BREMENERGY})
+        all_vars.update({"BREMBENDCORR": F.BREMBENDCORR})
+        all_vars.update({"BREMPIDE": F.BREMPIDE})
+        all_vars.update({"ECALPIDE": F.ECALPIDE})
+        all_vars.update({"ECALPIDMU": F.ECALPIDMU})
+        all_vars.update({"HCALPIDE": F.HCALPIDE})
+        all_vars.update({"HCALPIDMU": F.HCALPIDMU})
+        all_vars.update({"ELECTRONSHOWEREOP": F.ELECTRONSHOWEREOP})
+        all_vars.update({"CLUSTERMATCH": F.CLUSTERMATCH_CHI2})
+        all_vars.update({"ELECTRONMATCH": F.ELECTRONMATCH_CHI2})
+        all_vars.update({"BREMHYPOMATCH": F.BREMHYPOMATCH_CHI2})
+        all_vars.update({"ELECTRONENERGY": F.ELECTRONENERGY})
+        all_vars.update({"BREMHYPOENERGY": F.BREMHYPOENERGY})
+        all_vars.update({"BREMHYPODELTAX": F.BREMHYPODELTAX})
+        all_vars.update({"ELECTRONID": F.ELECTRONID})
+        all_vars.update({"HCALEOP": F.HCALEOP})
+        all_vars.update({"TRACK_MOM_": F.TRACK_MOMVEC})
+        #all_vars.update({"TRACK_POS_CLOSESTTOBEAM_": F.TRACK_POSVEC_CLOSESTTOBEAM})
+        all_vars.update({"IS_ID_pi": F.IS_ID("pi-")})
+        all_vars.update({"PDG_MASS_pi": F.PDG_MASS("pi+")})
+        all_vars.update({"SIGNED_DELTA_MASS_pi": F.SIGNED_DELTA_MASS("pi+")})
+        all_vars.update({"ABS_DELTA_MASS_pi": F.ABS_DELTA_MASS("pi+")})
+        all_vars.update({"IS_NOT_H": F.IS_NOT_H})
+        all_vars.update({"IS_PHOTON": F.IS_PHOTON})
+        all_vars.update({"PROBNN_P": F.PROBNN_P})
+
+    if comp:
+        all_vars.update({"MAXPT": F.MAX(F.PT)})
+        all_vars.update({"MAXDOCA": F.MAXSDOCA})
+        all_vars.update({"MAXDOCACHI2": F.MAXSDOCACHI2})
+        #all_vars.update({"MINDOCA": F.MINSDOCA})
+        #all_vars.update({"MINDOCACHI2": F.MINSDOCACHI2})
+        # the above in cut versions.
+
+    # duplicated from FC    all_vars.update({ 'MC_MOTHER_ID' : F.VALUE_OR(0) @ mctruth(
+    # duplicated from FC        F.MC_MOTHER(1, F.PARTICLE_ID))})
+
+    if comp:
+        all_vars.update({"MINPT": F.MIN(F.PT)})
+    all_vars.update({"MINIP": F.MINIP(pvs)})
+    all_vars.update({"MINIPCHI2": F.MINIPCHI2(pvs)})
+
+    if basic:
+        all_vars.update({"TRACKPT": F.TRACK_PT})
+        all_vars.update({"TRACKTYPE": F.VALUE_OR(-1) @ F.TRACKTYPE @ F.TRACK})
+        all_vars.update({"TRACKHISTORY": F.VALUE_OR(-1) @ F.TRACKHISTORY @ F.TRACK})
+        all_vars.update({"QOVERP": F.QOVERP @ F.TRACK})
+        all_vars.update({"NDOF": F.VALUE_OR(-1) @ F.NDOF @ F.TRACK})
+        all_vars.update({"NFTHITS": F.VALUE_OR(-1) @ F.NFTHITS @ F.TRACK})
+        all_vars.update({"NHITS": F.VALUE_OR(-1) @ F.NHITS @ F.TRACK})
+        all_vars.update({"NUTHITS": F.VALUE_OR(-1) @ F.NUTHITS @ F.TRACK})
+        all_vars.update({"NVPHITS": F.VALUE_OR(-1) @ F.NVPHITS @ F.TRACK})
+        all_vars.update({"TRACKHASVELO": F.VALUE_OR(-1) @ F.TRACKHASVELO @ F.TRACK})
+        all_vars.update({"TRACKHASUT": F.VALUE_OR(-1) @ F.TRACKHASUT @ F.TRACK})
+
+    all_vars.update({"OBJECT_KEY": F.OBJECT_KEY})
+    all_vars.update({"PHI": F.PHI})
+
+    all_vars.update({"ABS_PX": F.ABS @ F.PX})
+
+    all_vars.update({"REFERENCEPOINT_X": F.REFERENCEPOINT_X})
+    all_vars.update({"REFERENCEPOINT_Y": F.REFERENCEPOINT_Y})
+    all_vars.update({"REFERENCEPOINT_Z": F.REFERENCEPOINT_Z})
+
+    if comp:
+        all_vars.update({"SDOCA12": F.SDOCA(1, 2)})
+        all_vars.update({"SDOCA12_CHI2": F.SDOCACHI2(1, 2)})
+    if basic:
+        all_vars.update({"SHOWER_SHAPE": F.CALO_NEUTRAL_SHOWER_SHAPE})
+
+    if comp:
+        all_vars.update({"SUBCOMB12_MM": F.SUBCOMB(Functor=F.MASS, Indices=(1, 2))})
+        all_vars.update({"SUMPT": F.SUM(F.PT)})
+    if top:
+        all_vars.update({"SDOCA13": F.SDOCA(1, 3)})
+        all_vars.update({"SDOCA13_CHI2": F.SDOCACHI2(1, 3)})
+        all_vars.update({"SDOCA23": F.SDOCA(2, 3)})
+        all_vars.update({"SDOCA23_CHI2": F.SDOCACHI2(2, 3)})
+        all_vars.update({"SUBCOMB13_MM": F.SUBCOMB(Functor=F.MASS, Indices=(1, 3))})
+        all_vars.update({"SUBCOMB23_MM": F.SUBCOMB(Functor=F.MASS, Indices=(2, 3))})
+
+    if basic:
+        all_vars.update({"TX": F.TX})
+        all_vars.update({"TY": F.TY})
+
+    print(f"### For {ptype} returning variables {all_vars.functor_dict.keys()}")
+    return all_vars                                                                                                                      
+
+def tistos_variables(Hlt1_decisions, Hlt2_decisions, data, isturbo):
+    tistos_vars = FunctorCollection({})
+    tistos_vars += FC.HltTisTos( selection_type="Hlt1", trigger_lines=Hlt1_decisions, data=data)
+    if not isturbo:
+        tistos_vars += FC.HltTisTos( selection_type="Hlt2", trigger_lines=Hlt2_decisions, data=data)
+    return tistos_vars
+
+def event_variables(PVs, ODIN, decreports, rec_sum, hlt1_lines, sprucing_lines):
+    """
+    event variables
+    """
+
+    ## Some empty summaries removed
+    evt_vars = FunctorCollection({
+        "nPVs": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nPVs"),
+        "nTracks": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nTracks"),
+        "nLongTracks": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nLongTracks"),
+        "nDownstreamTracks": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nDownstreamTracks"),
+        "nUpstreamTracks": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nUpstreamTracks"),
+        "nVeloTracks": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nVeloTracks"),
+        "nBackTracks": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nBackTracks"),
+        "nGhosts": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nGhosts"),
+        "nRich1Hits": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nRich1Hits"),
+        "nRich2Hits": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nRich2Hits"),
+        #"nVeloClusters": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nVeloClusters"),
+        "nVPClusters": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nVPClusters"),
+        #"nITClusters": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nITClusters"),
+        #"nTTClusters": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nTTClusters"),
+        "nUTClusters": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nUTClusters"),
+        #"nOTClusters": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nOTClusters"),
+        "nFTClusters": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nFTClusters"),
+        #"nSPDhits": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nSPDhits"),
+        "eCalTot": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"eCalTot"),
+        "hCalTot": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"hCalTot"),
+        "nEcalClusters": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nEcalClusters"),
+        #"nMuonCoordsS0": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nMuonCoordsS0"),
+        #"nMuonCoordsS1": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nMuonCoordsS1"),
+        #"nMuonCoordsS2": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nMuonCoordsS2"),
+        #"nMuonCoordsS3": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nMuonCoordsS3"),
+        #"nMuonCoordsS4": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nMuonCoordsS4"),
+        #"nMuonTracks": F.VALUE_OR(-1) @F.RECSUMMARY_INFO(rec_sum,"nMuonTracks"),
+        "ALLPVX": F.ALLPVX(PVs),
+        "ALLPVY": F.ALLPVY(PVs),
+        "ALLPVZ": F.ALLPVZ(PVs),
+        })
+    evt_vars += FC.EventInfo()
+
+    evt_vars += FC.SelectionInfo( selection_type="Hlt1", trigger_lines=hlt1_lines)                                               
+    evt_vars += FC.SelectionInfo( selection_type="Hlt2", trigger_lines=sprucing_lines)
+    # duplicated from FC    if ODIN:
+    # duplicated from FC        evt_vars.update({ 'BUNCHCROSSING_ID' : F.BUNCHCROSSING_ID(ODIN)})
+    # duplicated from FC        evt_vars.update({ 'BUNCHCROSSING_TYPE' : F.BUNCHCROSSING_TYPE(ODIN)})
+
+    if decreports:
+        # NOTE: `bd2dsk_line` is not defined in this module; set it to the
+        # relevant line name before enabling DecReports tupling (decreports
+        # is always None in this analysis)
+        evt_vars.update(
+                {
+                    "DECISIONS": F.DECISIONS(
+                        Lines=[bd2dsk_line + "Decision"], DecReports=decreports
+                        )
+                    }
+                )
+        evt_vars.update(
+                {
+                    "DECREPORTS_FILTER": F.DECREPORTS_FILTER(
+                        Lines=[bd2dsk_line + "Decision"], DecReports=decreports
+                        )
+                    }
+                )
+
+    if ODIN:
+        evt_vars.update({"EVENTTYPE": F.EVENTTYPE(ODIN)})
+
+    # duplicated from FC        evt_vars.update({ 'GPSTIME' : F.GPSTIME(ODIN)})
+    # duplicated from FC        evt_vars.update({ 'ODINTCK' : F.ODINTCK(ODIN)})
+
+    evt_vars.update({"PV_SIZE": F.SIZE(PVs)})
+
+    if decreports:
+        evt_vars.update({"TCK": F.TCK(decreports)})
+
+    print(f"### For event returning variables {evt_vars.functor_dict.keys()}")
+    return evt_vars
+
+def candidate_variables(pvs, particles):
+    abbrs = default_names(particles)
+    names = all_particles(particles)
+    variables_B = {abbr: all_variables(pvs, None, particle_df["type"][name]) for abbr, name in zip(abbrs, names)}
+    return variables_B
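+
+# Example: for ["B+", "J/psi(1S)", "p+", "Lambda~0"] the dict is keyed by the
+# abbreviated branch names ("Bp", "Jpsi", "mup_Jpsi", ..., "Pip_Lambda"), each
+# holding the all_variables() collection appropriate to that particle's type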
+
+def make_dtf_variables(pvs, data, particles, pv_constraint=False, mass_constraints=None, label_forLLDD=""):
+
+    mass_constraints = mass_constraints or []
+    abbrs = default_names(particles)
+    names = all_particles(particles)
+
+    if pv_constraint: dtf_name = "DTF_PV_"
+    else: dtf_name = "DTF_"
+    dtf_name += "".join(f"{par}_" for par in particle_df["abbr"][mass_constraints])
+    my_hash='{hash}'
+
+    DTF = DecayTreeFitter(
+            name = f"{dtf_name}DecayTreeFitter_{my_hash}",
+            input_particles = data,
+            input_pvs = pvs if pv_constraint else None,
+            mass_constraints = mass_constraints,
+            )
+
+    dtf_dict = {}
+
+    shared_variables = FunctorCollection(
+                {   "ETA": F.ETA,
+                    "PHI": F.PHI,
+                    "BPVIPCHI2": F.BPVIPCHI2(pvs),
+                    "BPVIP": F.BPVIP(pvs),
+                    "BPVX": F.BPVX(pvs),
+                    "BPVY": F.BPVY(pvs),
+                    "BPVZ": F.BPVZ(pvs),
+                    }
+                ) + FC.Kinematics()
+
+    dtf_quality_variables = FunctorCollection( {
+            dtf_name+"_DTFCHI2": DTF.CHI2,
+            dtf_name+"_DTFNDOF": DTF.NDOF,
+            dtf_name+"_CTAU": DTF.CTAU,
+            dtf_name+"_CTAUERR": DTF.CTAUERR,
+            dtf_name+"_MERR": DTF.MASSERR,
+            }
+        )
+
+    # make branches
+    for abbr, name in zip(abbrs, names):
+        is_basic = particle_df["type"][name] == "basic"
+        if is_basic:
+            orig_variables = shared_variables + FunctorCollection(
+                    {   "TX"          : F.TX,
+                        "TY"          : F.TY,
+                        "MINIPCHI2"   : F.MINIPCHI2(pvs),
+                        "MINIP"       : F.MINIP(pvs),
+                        "KEY"         : F.VALUE_OR(-1) @ F.OBJECT_KEY @ F.TRACK,
+                        "TRGHOSTPROB": F.GHOSTPROB,
+                        "TRACKPT": F.TRACK_PT,
+                        "TRACKHISTORY": F.VALUE_OR(-1) @ F.TRACKHISTORY @ F.TRACK,
+                        "QOVERP": F.QOVERP @ F.TRACK,
+                        "TRCHI2DOF": F.CHI2DOF @ F.TRACK,
+                        "NDOF": F.VALUE_OR(-1) @ F.NDOF @ F.TRACK,
+                        "PROBNN_P": F.PROBNN_P,
+                        }
+                    )
+        else:
+            orig_variables = shared_variables + FunctorCollection(
+                {   "MAXPT": F.MAX(F.PT),
+                    "MINPT": F.MIN(F.PT),
+                    "SUMPT": F.SUM(F.PT),
+                    "MAXP": F.MAX(F.P),
+                    "MINP": F.MIN(F.P),
+                    "BPVDIRA": F.BPVDIRA(pvs),
+                    "CHI2DOF": F.CHI2DOF, #CHI2VXNDOF
+                    "BPVFDCHI2": F.BPVFDCHI2(pvs),
+                    "BPVFD": F.BPVFD(pvs),
+                    "BPVVDRHO": F.BPVVDRHO(pvs),
+                    "BPVVDZ": F.BPVVDZ(pvs),
+                    "BPVLTIME": F.BPVLTIME(pvs),
+                    "END_VX": F.END_VX, #END_
+                    "END_VY": F.END_VY,
+                    "END_VZ": F.END_VZ,
+                    }
+                )
+            orig_variables += dtf_quality_variables
+
+        dtf_variables = FunctorCollection({ dtf_name + expr: DTF(func) for expr, func in orig_variables.get_thor_functors().items() })
+        dtf_dict.update( {abbr: dtf_variables} )
+
+    return dtf_dict
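+
+# Example (illustrative): with pv_constraint=True and
+# mass_constraints=["J/psi(1S)"], every branch gains refitted variables
+# prefixed "DTF_PV_Jpsi_" (e.g. "DTF_PV_Jpsi_MASS")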
+
+import re
+def convert_cut(string_cut):
+    """Translate a LoKi-style cut string into a ThOr functor.
+
+    Known functor names in each "&"-separated term are prefixed with "F." and
+    the terms become the arguments of F.require_all; the result is eval'ed in
+    this module's scope, so names like MeV, in_range and get_pvs must resolve here.
+    """
+    cuts = string_cut.split("&")
+    paras = ["BPV", "PID_", "MASS", "CHARGE", "CHI2DOF", "END", "IS_ID", "ABS_", "MAX", "SUM", "GHOSTPROB", "CHILD", "PROBNN_P"]
+    values = []
+    for cut in cuts:
+        value = cut
+        for para in paras:
+            if para in cut:
+                value = value.replace(para, f"F.{para}")
+        value = re.sub(r"\bPT\b", "F.PT", value)
+        value = re.sub(r"\bP\b", "F.P", value)
+        value = re.sub(r"\bCHI2\b", "F.CHI2", value)
+        values.append(value)
+    functor_cut = ",".join(values)
+    return eval(f"F.require_all({functor_cut})")
+#return f"F.require_all({functor_cut})"