Commit 4b8e1eaf authored by Jianqiao Wang

Two new decays

parent cd2c313f
4 merge requests: !1748 (Draft: B hadron to J/psi V0), !1627 (New decay and decay involving DD V0), !1626 (Draft: New run3 3body decays), !1548 (Jianqiao/run3 dpa exotic)
Pipeline #7783265 passed
from .template import *
def Lb2JpsipK(options: Options):
    algs = template(["Lambda_b0", "J/psi(1S)", "p+", "K-"], True, "Lb2JpsipK")
    return make_config(options, algs)


def Lb2Jpsippi(options: Options):
    algs = template(["Lambda_b0", "J/psi(1S)", "p+", "pi-"], True, "Lb2Jpsippi")
    return make_config(options, algs)


def B2JpsiKpi(options: Options):
    algs = template(["B0", "J/psi(1S)", "K+", "pi-"], True, "B2JpsiKpi")
    return make_config(options, algs)


def Xib2JpsiLambdaK(options: Options):
    algs = template(["Xi_b-", "J/psi(1S)", "Lambda0", "K-"], True, "Xib2JpsiLambdaK")
    return make_config(options, algs)


def Bp2JpsipLambdabar(options: Options):
    algs = template(["B+", "J/psi(1S)", "p+", "Lambda~0"], True, "Bp2JpsipLambdabar")
    return make_config(options, algs)


def Bp2JpsiphiK(options: Options):
    algs = template(["B+", "J/psi(1S)", "phi(1020)", "K+"], True, "Bp2JpsiphiK")
    return make_config(options, algs)


def B2Jpsippbar(options: Options):
    algs = template(["B_s0", "J/psi(1S)", "p+", "p~-"], False, "B2Jpsippbar")
    return make_config(options, algs)


def B2Jpsipipi(options: Options):
    algs = template(["B0", "J/psi(1S)", "pi+", "pi-"], False, "B2Jpsipipi")
    return make_config(options, algs)


def B2JpsiKK(options: Options):
    algs = template(["B0", "J/psi(1S)", "K+", "K-"], False, "B2JpsiKK")
    return make_config(options, algs)
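# Each entry point above builds one tuple: template(particles, cc, directory) takes the
# decay particle list (mother first), a flag for whether the charge-conjugate decay is
# included, and the name used for the FunTuple directory. A hypothetical further channel
# (illustration only, not part of this production) would follow the same pattern:
# def Bs2JpsiKK(options: Options):
#     algs = template(["B_s0", "J/psi(1S)", "K+", "K-"], False, "Bs2JpsiKK")
#     return make_config(options, algs)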
defaults:
  application: DaVinci/v64r5
  wg: BandQ
  inform:
    - mengzhen.wang@cern.ch
    - jianqiao.wang@cern.ch

# data configuration
{%- set datasets = [
  ('Collision24', 'Sprucing24c2/90000000', 'BANDQ', 'B2Jpsipipi', 'MagDown', 'VeloClosed', 'Spruce', 'bandq'),
  ('Collision24', 'Sprucing24c2/90000000', 'BANDQ', 'B2JpsiKK', 'MagDown', 'VeloClosed', 'Spruce', 'bandq'),
]%}

{%- set dv_platform_detdesc = "x86_64_v2-el9-gcc13+detdesc-opt" %}

{%- for data, Type, wg, decay, polarity, Velo, process, stream in datasets %}
data_{{decay}}_{{wg}}_{{process}}_{{data}}_{{Velo}}_{{polarity}}:
  application: "DaVinci/v64r7"
  input:
    bk_query: "/LHCb/{{data}}/Beam6800GeV-{{Velo}}-{{polarity}}/Real Data/{{Type}}/{{wg}}.DST"
    dq_flags:
      - OK
      - UNCHECKED
    keep_running: True
    n_test_lfns: 1
  options:
    entrypoint: Run3_b2psiX_3body_withUT.dv_data:{{decay}}
    extra_options:
      input_type: ROOT
      input_process: "{{process}}"
      input_stream: "{{stream}}"
      input_raw_format: 0.5
      simulation: False
      data_type: "Upgrade"
      geometry_version: run3/2024.Q1.2-v00.00
      conditions_version: master
  output: Data_{{process}}_{{stream}}_{{decay}}.ROOT
{%- endfor %}
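# For illustration only (not part of the configuration): after Jinja rendering, the first
# dataset tuple above becomes a job named
#   data_B2Jpsipipi_BANDQ_Spruce_Collision24_VeloClosed_MagDown
# running the Run3_b2psiX_3body_withUT.dv_data:B2Jpsipipi entry point over the bookkeeping
# path "/LHCb/Collision24/Beam6800GeV-VeloClosed-MagDown/Real Data/Sprucing24c2/90000000/BANDQ.DST".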
from .tools.tupling_maker import *
from .tools.descriptor_writer import *
from Hlt2Conf.standard_particles import make_long_kaons, make_long_protons, make_long_pions, make_LambdaLL, make_long_muons, make_phi2kk
def template(particles, cc, directory):
    ##### Default inputs
    sprucing_line = 'SpruceBandQ_JpsiToMuMuDetached'
    Jpsi = get_particles(f"/Event/Spruce/{sprucing_line}/Particles")
    rec_sum = get_rec_summary()
    v2_pvs = get_pvs()
    odin = get_odin()
    decreports = None

    long_pions = make_long_pions()
    long_kaons = make_long_kaons()
    long_protons = make_long_protons()

    DaughterCutp = "(BPVIPCHI2(get_pvs())>9) & (PT>250*MeV) & (P>10*GeV) & (PID_P>5) & (PID_P-PID_K>5) & (GHOSTPROB<0.5)"
    DaughterCutpi = "(BPVIPCHI2(get_pvs())>9) & (PT>250*MeV) & (PID_K<5) & (GHOSTPROB<0.5)"
    DaughterCutK = "(BPVIPCHI2(get_pvs())>9) & (PT>250*MeV) & (PID_K>5) & (GHOSTPROB<0.5)"

    protons = ParticleFilter(long_protons, Cut=F.FILTER(convert_cut(DaughterCutp)))
    kaons = ParticleFilter(long_kaons, Cut=F.FILTER(convert_cut(DaughterCutK)))
    pions = ParticleFilter(long_pions, Cut=F.FILTER(convert_cut(DaughterCutpi)))
    muons = make_long_muons()
    Lambda = make_LambdaLL()
    phi = make_phi2kk(kaon_pidk_min=5)

    input_dict = {"J/psi(1S)": Jpsi, "phi(1020)": phi,
                  "Lambda0": Lambda, "Lambda~0": Lambda,
                  "pi+": pions, "pi-": pions,
                  "K+": kaons, "K-": kaons,
                  "p+": protons, "p~-": protons,
                  "mu+": muons, "mu~-": muons,
                  }

    # create particle containers
    B2JpsiX = ParticleCombiner(
        Inputs=[input_dict[particle] for particle in particles[1:]],
        DecayDescriptor=descriptor(particles, cc),
        name="JpsiToMuMu_Detached_line_validation",
        CombinationCut=convert_cut("in_range(4000, MASS, 7000) & (CHILD(2, PT)+CHILD(3, PT)>900*MeV)"),
        #CombinationCut=convert_cut("in_range(5200, MASS, 7000)"),
        CompositeCut=convert_cut("in_range(4000, MASS, 7000) & CHI2DOF<20 & BPVLTIME(get_pvs())>0.2*picosecond & BPVVDZ(get_pvs())>0.*mm")
    )

    # FunTuple: define branches.
    hlt1_trigger_lines = ['Hlt1TrackMVADecision',
                          'Hlt1TwoTrackMVADecision',
                          'Hlt1D2KKDecision',
                          'Hlt1D2KPiDecision',
                          'Hlt1D2PiPiDecision',
                          'Hlt1DiMuonHighMassDecision',
                          'Hlt1DiMuonLowMassDecision',
                          'Hlt1DiMuonSoftDecision',
                          'Hlt1KsToPiPiDecision',
                          'Hlt1LowPtMuonDecision',
                          'Hlt1LowPtDiMuonDecision',
                          'Hlt1SingleHighPtMuonDecision',
                          'Hlt1TrackMuonMVADecision']

    line_prefilter = create_lines_filter(name=f"PreFilter_{sprucing_line}", lines=[sprucing_line])
    evt_vars = event_variables(v2_pvs, odin, decreports, rec_sum, hlt1_trigger_lines, [sprucing_line])
    candidate_vars = candidate_variables(v2_pvs, particles)
    tistos_vars = tistos_variables(hlt1_trigger_lines, [], Jpsi, False)

    composite_particles = [particle for particle in particles if (particle_df["type"][particle] != "basic") and (particle != "phi(1020)")]
    dtf_vars1 = make_dtf_variables(v2_pvs, B2JpsiX, particles, True, composite_particles)
    dtf_vars2 = make_dtf_variables(v2_pvs, B2JpsiX, particles, True, composite_particles[1:])

    for particle in particles:
        if particle_df["type"][particle] != "basic":
            candidate_vars[particle_df["abbr"][particle]] += tistos_vars
    for key in candidate_vars.keys():
        candidate_vars[key] += dtf_vars1[key]
        candidate_vars[key] += dtf_vars2[key]

    B2JpsiX_branches = default_branches(particles, cc)

    # define tupling algorithms
    B2JpsiX_tuple = FunTuple_Particles(name=directory,
                                       inputs=B2JpsiX,
                                       tuple_name="DecayTree",
                                       fields=B2JpsiX_branches,
                                       variables=candidate_vars,
                                       store_multiple_cand_info=True,
                                       event_variables=evt_vars)

    algs = {f"{directory}_tuple": [B2JpsiX_tuple, line_prefilter]}
    return algs
import pandas as pd
import numpy as np
particle_dict = {"Lambda_b0": {"abbr": "Lb", "type": "toplevel", "daughters": []},
                 "Xi_b-": {"abbr": "Xibm", "type": "toplevel", "daughters": []},
                 "Xi_b0": {"abbr": "Xib0", "type": "toplevel", "daughters": []},
                 "B0": {"abbr": "B0", "type": "toplevel", "daughters": []},
                 "B+": {"abbr": "Bp", "type": "toplevel", "daughters": []},
                 "B-": {"abbr": "Bm", "type": "toplevel", "daughters": []},
                 "B_s0": {"abbr": "Bs", "type": "toplevel", "daughters": []},
                 "J/psi(1S)": {"abbr": "Jpsi", "type": "composite", "daughters": ["mu+", "mu-"]},
                 "psi(2S)": {"abbr": "psi2S", "type": "composite", "daughters": ["mu+", "mu-"]},
                 "phi(1020)": {"abbr": "phi", "type": "composite", "daughters": ["K+", "K-"]},
                 "Lambda0": {"abbr": "Lambda", "type": "composite", "daughters": ["p+", "pi-"]},
                 "Lambda~0": {"abbr": "Lambda", "type": "composite", "daughters": ["p~-", "pi+"]},
                 "mu+": {"abbr": "mup", "type": "basic", "daughters": []},
                 "mu-": {"abbr": "mum", "type": "basic", "daughters": []},
                 "e+": {"abbr": "ep", "type": "basic", "daughters": []},
                 "e-": {"abbr": "em", "type": "basic", "daughters": []},
                 "pi+": {"abbr": "Pip", "type": "basic", "daughters": []},
                 "pi-": {"abbr": "Pim", "type": "basic", "daughters": []},
                 "K+": {"abbr": "Kp", "type": "basic", "daughters": []},
                 "K-": {"abbr": "Km", "type": "basic", "daughters": []},
                 "p+": {"abbr": "Pp", "type": "basic", "daughters": []},
                 "p~-": {"abbr": "Pm", "type": "basic", "daughters": []},
                 }
particle_df = pd.DataFrame(particle_dict).T
def descriptor(particles, cc=False):
    decay_descriptor = f"{particles[0]} ->"
    for particle in particles[1:]:
        decay_descriptor += f" {particle}"
    if cc:
        decay_descriptor = f"[{decay_descriptor}]cc"
    return decay_descriptor
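# Example (illustration only): descriptor(["Lambda_b0", "J/psi(1S)", "p+", "K-"], cc=True)
# returns "[Lambda_b0 -> J/psi(1S) p+ K-]cc", the descriptor used by the Lb2JpsipK line.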
def decay_branches(mother, daughters, decay_descriptor):
    psi_pos = 0
    phi_pos = 0
    Lambda_pos = 0
    Pm_pos = Pp_pos = 0
    Km_pos = Kp_pos = 0
    Pim_pos = Pip_pos = 0
    mum_pos = mup_pos = 0
    em_pos = ep_pos = 0

    #if decay_descriptor[0] == "[" or decay_descriptor[-3:] == "]cc":
    if "cc" in decay_descriptor[-5:]:
        branch_descriptor = decay_descriptor[:-2] + "CC"
    else:
        branch_descriptor = decay_descriptor

    #if "J/psi(1S)" in decay_descriptor:
    #    branch_descriptor = branch_descriptor.replace("J/psi(1S)", "(J/psi(1S) -> mu+ mu-)")
    #if "psi(2S)" in decay_descriptor:
    #    branch_descriptor = branch_descriptor.replace("psi(2S)", "(psi(2S) -> mu+ mu-)")
    #if "Lambda0" in decay_descriptor:
    #    branch_descriptor = branch_descriptor.replace("Lambda0", "(Lambda0 -> p+ pi-)")
    #if "Lambda~0" in decay_descriptor:
    #    branch_descriptor = branch_descriptor.replace("Lambda~0", "(Lambda~0 -> p~- pi+)")
    for comp_par in particle_df.query("type=='composite'").index:
        if comp_par in decay_descriptor:
            #branch_descriptor = branch_descriptor.replace(comp_par, comp_par + "->" + " ".join(particle_df["daughters"][comp_par]))
            branch_descriptor = branch_descriptor.replace(comp_par, f'({comp_par} -> {" ".join(particle_df["daughters"][comp_par])})')

    branches = {mother: branch_descriptor}
    for daughter in daughters:
        if "psi" in daughter:
            true_pos = branch_descriptor.rfind("(", 0, branch_descriptor.find("psi", psi_pos))
            branches.update({daughter: branch_descriptor[:true_pos] + "^" + branch_descriptor[true_pos:]})
            psi_pos = branch_descriptor.find("psi", psi_pos) + len("psi(nS)")
        if "phi" in daughter:
            true_pos = branch_descriptor.rfind("(", 0, branch_descriptor.find("phi", phi_pos))
            branches.update({daughter: branch_descriptor[:true_pos] + "^" + branch_descriptor[true_pos:]})
            phi_pos = branch_descriptor.find("phi", phi_pos) + len("phi(1020)")
        if "Lambda" in daughter:
            true_pos = branch_descriptor.rfind("(", 0, branch_descriptor.find("Lambda", Lambda_pos))
            branches.update({daughter: branch_descriptor[:true_pos] + "^" + branch_descriptor[true_pos:]})
            Lambda_pos = branch_descriptor.find("Lambda", Lambda_pos) + len("Lambda~0")
        if "Pp" in daughter:
            true_pos = branch_descriptor.find("p+", Pp_pos)
            branches.update({daughter: branch_descriptor[:true_pos] + "^" + branch_descriptor[true_pos:]})
            Pp_pos = true_pos + len("p+")
        if "Pm" in daughter:
            true_pos = branch_descriptor.find("p~-", Pm_pos)
            branches.update({daughter: branch_descriptor[:true_pos] + "^" + branch_descriptor[true_pos:]})
            Pm_pos = true_pos + len("p~-")
        if "Kp" in daughter:
            true_pos = branch_descriptor.find("K+", Kp_pos)
            branches.update({daughter: branch_descriptor[:true_pos] + "^" + branch_descriptor[true_pos:]})
            Kp_pos = true_pos + len("K+")
        if "Km" in daughter:
            true_pos = branch_descriptor.find("K-", Km_pos)
            branches.update({daughter: branch_descriptor[:true_pos] + "^" + branch_descriptor[true_pos:]})
            Km_pos = true_pos + len("K-")
        if "Pip" in daughter or "Hp" in daughter:
            true_pos = branch_descriptor.find("pi+", Pip_pos)
            branches.update({daughter: branch_descriptor[:true_pos] + "^" + branch_descriptor[true_pos:]})
            Pip_pos = true_pos + len("pi+")
        if "Pim" in daughter or "Hm" in daughter:
            true_pos = branch_descriptor.find("pi-", Pim_pos)
            branches.update({daughter: branch_descriptor[:true_pos] + "^" + branch_descriptor[true_pos:]})
            Pim_pos = true_pos + len("pi-")
        if "mup" in daughter:
            true_pos = branch_descriptor.find("mu+", mup_pos)
            branches.update({daughter: branch_descriptor[:true_pos] + "^" + branch_descriptor[true_pos:]})
            mup_pos = true_pos + len("mu+")
        if "mum" in daughter:
            true_pos = branch_descriptor.find("mu-", mum_pos)
            branches.update({daughter: branch_descriptor[:true_pos] + "^" + branch_descriptor[true_pos:]})
            mum_pos = true_pos + len("mu-")
        if "ep" in daughter:
            true_pos = branch_descriptor.find("e+", ep_pos)
            branches.update({daughter: branch_descriptor[:true_pos] + "^" + branch_descriptor[true_pos:]})
            ep_pos = true_pos + len("e+")
        if "em" in daughter:
            true_pos = branch_descriptor.find("e-", em_pos)
            branches.update({daughter: branch_descriptor[:true_pos] + "^" + branch_descriptor[true_pos:]})
            em_pos = true_pos + len("e-")
    return branches
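# Illustration (matching the commented-out example near the end of this file):
# decay_branches("Bs", ["Jpsi", "Pp", "Pm", "mup", "mum"], "[B_s0 -> J/psi(1S) p+ p~-]cc")
# returns, schematically,
#   Bs  : "[B_s0 -> (J/psi(1S) -> mu+ mu-) p+ p~-]CC"
#   Jpsi: "[B_s0 -> ^(J/psi(1S) -> mu+ mu-) p+ p~-]CC"
#   Pp  : "[B_s0 -> (J/psi(1S) -> mu+ mu-) ^p+ p~-]CC"
#   mup : "[B_s0 -> (J/psi(1S) -> ^mu+ mu-) p+ p~-]CC"
# i.e. composite daughters are expanded to their decay and "^" marks the particle each
# FunTuple field refers to.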
def all_particles(particles):
    all_pars = []
    for particle in particles:
        all_pars += ([particle] + particle_df["daughters"][particle])
    return all_pars
#print(all_particles(["B_s0","J/psi(1S)", "p+", "p~-"]))
def default_names(particles):
    names = []
    for particle in particles:
        abbr = particle_df["abbr"][particle]
        names.append(abbr)
        names += [f"{daughter_abbr}_{abbr}" for daughter_abbr in particle_df["abbr"][particle_df["daughters"][particle]]]
    return names


def default_branches(particles, cc=False):
    names = default_names(particles)
    return decay_branches(names[0], names[1:], descriptor(particles, cc))
if __name__ == "__main__":
    # quick check of the branch dictionary; guarded so it does not run on import
    branches = default_branches(["B+", "J/psi(1S)", "phi(1020)", "K+"], True)
    print(branches)
#branches = decay_branches("Bs", ["Jpsi", "Pp", "Pm", "mup", "mum"], "[B_s0 -> J/psi(1S) p+ p~-]cc")
#print(branches)
#branches = default_names( ["B_s0","J/psi(1S)", "p+", "p~-"])
#print(descriptor(["Lambda_b0", "J/psi(1S)", "p+", "K-"], True))
from DaVinci import make_config, Options
from DaVinci.algorithms import create_lines_filter
from PyConf.reading import get_rec_summary, get_pvs
from PyConf.reading import get_particles
import FunTuple.functorcollections as FC
import Functors as F
from FunTuple import FunctorCollection
from FunTuple import FunTuple_Event
from FunTuple import FunTuple_Particles
from .descriptor_writer import *
#from PyConf.reading import (get_particles, get_charged_protoparticles, get_pvs, get_rec_summary, _get_unpacked)
from Functors.math import in_range
from Hlt2Conf.algorithms_thor import ParticleCombiner, ParticleFilter
from RecoConf.event_filters import require_pvs
from Hlt2Conf.standard_particles import get_long_track_selector, standard_protoparticle_filter
from PyConf.Algorithms import FunctionalParticleMaker
from PyConf.reading import get_charged_protoparticles as _make_charged_protoparticles
from PyConf.reading import get_odin
from GaudiKernel.SystemOfUnits import MeV, picosecond, mm, GeV
from DecayTreeFitter import DecayTreeFitter
_basic = "basic"
_composite = "composite"
_toplevel = "toplevel"
def all_variables(pvs, mctruth, ptype, candidates=None, ftAlg=None):
    if ptype not in [_basic, _composite, _toplevel]:
        # raise instead of silently constructing an unused Exception; toplevel is a valid type
        raise Exception(f"I want {_basic}, {_composite} or {_toplevel}. Got {ptype}")
    all_vars = FunctorCollection({})

    comp = _composite == ptype or _toplevel == ptype  # is composite
    basic = _basic == ptype  # is not composite
    top = _toplevel == ptype  # the B

    all_vars += FC.Kinematics()
    if basic:
        all_vars += FC.ParticleID(extra_info=True)

    if comp:
        all_vars.update({"ALV": F.ALV(Child1=1, Child2=2)})

    if comp:  # all these require a vertex
        all_vars.update({"BPVCORRM": F.BPVCORRM(pvs)})
        all_vars.update({"BPVCORRMERR": F.BPVCORRMERR(pvs)})
        all_vars.update({"BPVDIRA": F.BPVDIRA(pvs)})
        all_vars.update({"BPVDLS": F.BPVDLS(pvs)})
        all_vars.update({"BPVETA": F.BPVETA(pvs)})
        all_vars.update({"BPVFD": F.BPVFD(pvs)})
        all_vars.update({"BPVFDCHI2": F.BPVFDCHI2(pvs)})
        all_vars.update({"BPVFDIR": F.BPVFDIR(pvs)})
        all_vars.update({"BPVFDVEC": F.BPVFDVEC(pvs)})

    all_vars.update({"BPVIP": F.BPVIP(pvs)})
    all_vars.update({"BPVIPCHI2": F.BPVIPCHI2(pvs)})
    all_vars.update({"BPVX": F.BPVX(pvs)})
    all_vars.update({"BPVY": F.BPVY(pvs)})
    all_vars.update({"BPVZ": F.BPVZ(pvs)})
    all_vars.update({"ALLPVX": F.ALLPVX(pvs)})
    all_vars.update({"ALLPVY": F.ALLPVY(pvs)})
    all_vars.update({"ALLPVZ": F.ALLPVZ(pvs)})

    if comp:  # all these require a vertex
        all_vars.update({"ALLPV_FD": F.ALLPV_FD(pvs)})
        all_vars.update({"ALLPV_IP": F.ALLPV_IP(pvs)})
        all_vars.update({"BPVLTIME": F.BPVLTIME(pvs)})
        all_vars.update({"BPVVDRHO": F.BPVVDRHO(pvs)})
        all_vars.update({"BPVVDX": F.BPVVDX(pvs)})
        all_vars.update({"BPVVDY": F.BPVVDY(pvs)})
        all_vars.update({"BPVVDZ": F.BPVVDZ(pvs)})

    all_vars.update({"CHARGE": F.CHARGE})
    all_vars.update({"CHI2": F.CHI2})
    all_vars.update({"CHI2DOF": F.CHI2DOF})

    #if top:  # apply this only to B
    #    all_vars.update({"CHILD1_PT": F.CHILD(1, F.PT)})  # example of CHILD
    #    all_vars.update({"Ds_END_VZ": F.CHILD(1, F.END_VZ)})
    #    all_vars.update({"Delta_END_VZ_DsB0": F.CHILD(1, F.END_VZ) - F.END_VZ})

    if comp:
        #all_vars.update({"DOCA": F.SDOCA(Child1=1, Child2=2)})
        #all_vars.update({"DOCACHI2": F.SDOCACHI2(Child1=1, Child2=2)})
        all_vars.update({"END_VRHO": F.END_VRHO})
        all_vars.update({"END_VX": F.END_VX})
        all_vars.update({"END_VY": F.END_VY})
        all_vars.update({"END_VZ": F.END_VZ})

    # duplicated from FC all_vars.update({"ENERGY": F.ENERGY})
    all_vars.update({"ETA": F.ETA})
    all_vars.update({"FOURMOMENTUM": F.FOURMOMENTUM})
    all_vars.update({"ISBASIC": F.ISBASICPARTICLE})

    if basic:
        all_vars.update({"GHOSTPROB": F.GHOSTPROB})
        all_vars.update({"ISMUON": F.ISMUON})
        all_vars.update({"INMUON": F.INMUON})
        all_vars.update({"INECAL": F.INECAL})
        all_vars.update({"INHCAL": F.INHCAL})
        all_vars.update({"HASBREM": F.HASBREM})
        all_vars.update({"BREMENERGY": F.BREMENERGY})
        all_vars.update({"BREMBENDCORR": F.BREMBENDCORR})
        all_vars.update({"BREMPIDE": F.BREMPIDE})
        all_vars.update({"ECALPIDE": F.ECALPIDE})
        all_vars.update({"ECALPIDMU": F.ECALPIDMU})
        all_vars.update({"HCALPIDE": F.HCALPIDE})
        all_vars.update({"HCALPIDMU": F.HCALPIDMU})
        all_vars.update({"ELECTRONSHOWEREOP": F.ELECTRONSHOWEREOP})
        all_vars.update({"CLUSTERMATCH": F.CLUSTERMATCH_CHI2})
        all_vars.update({"ELECTRONMATCH": F.ELECTRONMATCH_CHI2})
        all_vars.update({"BREMHYPOMATCH": F.BREMHYPOMATCH_CHI2})
        all_vars.update({"ELECTRONENERGY": F.ELECTRONENERGY})
        all_vars.update({"BREMHYPOENERGY": F.BREMHYPOENERGY})
        all_vars.update({"BREMHYPODELTAX": F.BREMHYPODELTAX})
        all_vars.update({"ELECTRONID": F.ELECTRONID})
        all_vars.update({"HCALEOP": F.HCALEOP})
        all_vars.update({"TRACK_MOM_": F.TRACK_MOMVEC})
        all_vars.update({"TRACK_POS_CLOSESTTOBEAM_": F.TRACK_POSVEC_CLOSESTTOBEAM})
        all_vars.update({"IS_ID_pi": F.IS_ID("pi-")})
        all_vars.update({"PDG_MASS_pi": F.PDG_MASS("pi+")})
        all_vars.update({"SIGNED_DELTA_MASS_pi": F.SIGNED_DELTA_MASS("pi+")})
        all_vars.update({"ABS_DELTA_MASS_pi": F.ABS_DELTA_MASS("pi+")})
        all_vars.update({"IS_NOT_H": F.IS_NOT_H})
        all_vars.update({"IS_PHOTON": F.IS_PHOTON})

    if comp:
        all_vars.update({"MAXPT": F.MAX(F.PT)})
        all_vars.update({"MAXDOCA": F.MAXSDOCA})
        all_vars.update({"MAXDOCACHI2": F.MAXSDOCACHI2})
        #all_vars.update({"MINDOCA": F.MINSDOCA})
        #all_vars.update({"MINDOCACHI2": F.MINSDOCACHI2})
        # the above in cut versions.

    # duplicated from FC all_vars.update({'MC_MOTHER_ID': F.VALUE_OR(0) @ mctruth(
    # duplicated from FC     F.MC_MOTHER(1, F.PARTICLE_ID))})

    if comp:
        all_vars.update({"MINPT": F.MIN(F.PT)})
    all_vars.update({"MINIP": F.MINIP(pvs)})
    all_vars.update({"MINIPCHI2": F.MINIPCHI2(pvs)})

    if basic:
        all_vars.update({"TRACKPT": F.TRACK_PT})
        all_vars.update({"TRACKHISTORY": F.VALUE_OR(-1) @ F.TRACKHISTORY @ F.TRACK})
        all_vars.update({"QOVERP": F.QOVERP @ F.TRACK})
        all_vars.update({"NDOF": F.VALUE_OR(-1) @ F.NDOF @ F.TRACK})
        all_vars.update({"NFTHITS": F.VALUE_OR(-1) @ F.NFTHITS @ F.TRACK})
        all_vars.update({"NHITS": F.VALUE_OR(-1) @ F.NHITS @ F.TRACK})
        all_vars.update({"NUTHITS": F.VALUE_OR(-1) @ F.NUTHITS @ F.TRACK})
        all_vars.update({"NVPHITS": F.VALUE_OR(-1) @ F.NVPHITS @ F.TRACK})
        all_vars.update({"TRACKHASVELO": F.VALUE_OR(-1) @ F.TRACKHASVELO @ F.TRACK})
        all_vars.update({"TRACKHASUT": F.VALUE_OR(-1) @ F.TRACKHASUT @ F.TRACK})

    all_vars.update({"OBJECT_KEY": F.OBJECT_KEY})
    all_vars.update({"PHI": F.PHI})
    all_vars.update({"ABS_PX": F.ABS @ F.PX})
    all_vars.update({"REFERENCEPOINT_X": F.REFERENCEPOINT_X})
    all_vars.update({"REFERENCEPOINT_Y": F.REFERENCEPOINT_Y})
    all_vars.update({"REFERENCEPOINT_Z": F.REFERENCEPOINT_Z})

    if comp:
        all_vars.update({"SDOCA12": F.SDOCA(1, 2)})
        all_vars.update({"SDOCA12_CHI2": F.SDOCACHI2(1, 2)})
    if basic:
        all_vars.update({"SHOWER_SHAPE": F.CALO_NEUTRAL_SHOWER_SHAPE})

    if comp:
        all_vars.update({"SUBCOMB12_MM": F.SUBCOMB(Functor=F.MASS, Indices=(1, 2))})
        all_vars.update({"SUMPT": F.SUM(F.PT)})
    if top:
        all_vars.update({"SDOCA13": F.SDOCA(1, 3)})
        all_vars.update({"SDOCA13_CHI2": F.SDOCACHI2(1, 3)})
        all_vars.update({"SDOCA23": F.SDOCA(2, 3)})
        all_vars.update({"SDOCA23_CHI2": F.SDOCACHI2(2, 3)})
        all_vars.update({"SUBCOMB13_MM": F.SUBCOMB(Functor=F.MASS, Indices=(1, 3))})
        all_vars.update({"SUBCOMB23_MM": F.SUBCOMB(Functor=F.MASS, Indices=(2, 3))})
    if basic:
        all_vars.update({"TX": F.TX})
        all_vars.update({"TY": F.TY})

    print(f"### For {ptype} returning variables {all_vars.functor_dict.keys()}")
    return all_vars
def tistos_variables(Hlt1_decisions, Hlt2_decisions, data, isturbo):
    tistos_vars = FunctorCollection({})
    tistos_vars += FC.HltTisTos(selection_type="Hlt1", trigger_lines=Hlt1_decisions, data=data)
    if not isturbo:
        tistos_vars += FC.HltTisTos(selection_type="Hlt2", trigger_lines=Hlt2_decisions, data=data)
    return tistos_vars
def event_variables(PVs, ODIN, decreports, rec_sum, hlt1_lines, sprucing_lines):
    """
    event variables
    """
    ## Some empty summaries removed
    evt_vars = FunctorCollection({
        "nPVs": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nPVs"),
        "nTracks": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nTracks"),
        "nLongTracks": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nLongTracks"),
        "nDownstreamTracks": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nDownstreamTracks"),
        "nUpstreamTracks": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nUpstreamTracks"),
        "nVeloTracks": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nVeloTracks"),
        "nBackTracks": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nBackTracks"),
        "nGhosts": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nGhosts"),
        "nRich1Hits": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nRich1Hits"),
        "nRich2Hits": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nRich2Hits"),
        #"nVeloClusters": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nVeloClusters"),
        "nVPClusters": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nVPClusters"),
        #"nITClusters": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nITClusters"),
        #"nTTClusters": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nTTClusters"),
        "nUTClusters": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nUTClusters"),
        #"nOTClusters": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nOTClusters"),
        "nFTClusters": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nFTClusters"),
        #"nSPDhits": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nSPDhits"),
        "eCalTot": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "eCalTot"),
        "hCalTot": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "hCalTot"),
        "nEcalClusters": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nEcalClusters"),
        #"nMuonCoordsS0": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nMuonCoordsS0"),
        #"nMuonCoordsS1": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nMuonCoordsS1"),
        #"nMuonCoordsS2": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nMuonCoordsS2"),
        #"nMuonCoordsS3": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nMuonCoordsS3"),
        #"nMuonCoordsS4": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nMuonCoordsS4"),
        #"nMuonTracks": F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_sum, "nMuonTracks"),
        "ALLPVX": F.ALLPVX(PVs),
        "ALLPVY": F.ALLPVY(PVs),
        "ALLPVZ": F.ALLPVZ(PVs),
    })
    evt_vars += FC.EventInfo()
    evt_vars += FC.SelectionInfo(selection_type="Hlt1", trigger_lines=hlt1_lines)
    evt_vars += FC.SelectionInfo(selection_type="Hlt2", trigger_lines=sprucing_lines)
    # duplicated from FC if ODIN:
    # duplicated from FC     evt_vars.update({'BUNCHCROSSING_ID': F.BUNCHCROSSING_ID(ODIN)})
    # duplicated from FC     evt_vars.update({'BUNCHCROSSING_TYPE': F.BUNCHCROSSING_TYPE(ODIN)})
    if decreports:
        # NB: bd2dsk_line is not defined in this module; it must be provided before
        # decreports can be used (decreports is None in this production).
        evt_vars.update(
            {
                "DECISIONS": F.DECISIONS(
                    Lines=[bd2dsk_line + "Decision"], DecReports=decreports
                )
            }
        )
        evt_vars.update(
            {
                "DECREPORTS_FILTER": F.DECREPORTS_FILTER(
                    Lines=[bd2dsk_line + "Decision"], DecReports=decreports
                )
            }
        )
    if ODIN:
        evt_vars.update({"EVENTTYPE": F.EVENTTYPE(ODIN)})
    # duplicated from FC evt_vars.update({'GPSTIME': F.GPSTIME(ODIN)})
    # duplicated from FC evt_vars.update({'ODINTCK': F.ODINTCK(ODIN)})
    evt_vars.update({"PV_SIZE": F.SIZE(PVs)})
    if decreports:
        evt_vars.update({"TCK": F.TCK(decreports)})

    print(f"### For event returning variables {evt_vars.functor_dict.keys()}")
    return evt_vars
def candidate_variables(pvs, particles):
    abbrs = default_names(particles)
    names = all_particles(particles)
    variables_B = {abbr: all_variables(pvs, None, particle_df["type"][name]) for abbr, name in zip(abbrs, names)}
    return variables_B
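# Illustration: for particles = ["B_s0", "J/psi(1S)", "p+", "p~-"] the tuple branches and
# the variable set each one receives are
#   Bs                         -> all_variables(..., "toplevel")
#   Jpsi                       -> all_variables(..., "composite")
#   mup_Jpsi, mum_Jpsi, Pp, Pm -> all_variables(..., "basic")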
from DecayTreeFitter import DecayTreeFitter
def make_dtf_variables(pvs, data, particles, pv_constraint=False, mass_constraints=[]):
    abbrs = default_names(particles)
    names = all_particles(particles)

    if pv_constraint:
        dtf_name = "DTF_PV_"
    else:
        dtf_name = "DTF_"
    dtf_name += "".join(f"{par}_" for par in particle_df["abbr"][mass_constraints])

    DTF = DecayTreeFitter(
        name=f"{dtf_name}DecayTreeFitter",
        input_particles=data,
        input_pvs=(pv_constraint and pvs),
        mass_constraints=mass_constraints,
    )

    dtf_dict = {}

    shared_variables = FunctorCollection(
        {
            "ETA": F.ETA,
            "PHI": F.PHI,
            "BPVIPCHI2": F.BPVIPCHI2(pvs),
            "BPVIP": F.BPVIP(pvs),
            "BPVX": F.BPVX(pvs),
            "BPVY": F.BPVY(pvs),
            "BPVZ": F.BPVZ(pvs),
        }
    ) + FC.Kinematics()

    dtf_quality_variables = FunctorCollection(
        {
            dtf_name + "_DTFCHI2": DTF.CHI2,
            dtf_name + "_DTFNDOF": DTF.NDOF,
            dtf_name + "_CTAU": DTF.CTAU,
            dtf_name + "_CTAUERR": DTF.CTAUERR,
            dtf_name + "_MERR": DTF.MASSERR,
        }
    )

    # make branches
    for abbr, name in zip(abbrs, names):
        # compare against "basic" explicitly: the type string itself is always truthy,
        # so the composite branch below would otherwise never be reached
        is_basic = particle_df["type"][name] == "basic"
        if is_basic:
            orig_variables = shared_variables + FunctorCollection(
                {
                    "TX": F.TX,
                    "TY": F.TY,
                    "MINIPCHI2": F.MINIPCHI2(pvs),
                    "MINIP": F.MINIP(pvs),
                    "KEY": F.VALUE_OR(-1) @ F.OBJECT_KEY @ F.TRACK,
                    "TRGHOSTPROB": F.GHOSTPROB,
                    "TRACKPT": F.TRACK_PT,
                    "TRACKHISTORY": F.VALUE_OR(-1) @ F.TRACKHISTORY @ F.TRACK,
                    "QOVERP": F.QOVERP @ F.TRACK,
                    "TRCHI2DOF": F.CHI2DOF @ F.TRACK,
                    "NDOF": F.VALUE_OR(-1) @ F.NDOF @ F.TRACK,
                }
            )
        else:
            orig_variables = shared_variables + FunctorCollection(
                {
                    "MAXPT": F.MAX(F.PT),
                    "MINPT": F.MIN(F.PT),
                    "SUMPT": F.SUM(F.PT),
                    "MAXP": F.MAX(F.P),
                    "MINP": F.MIN(F.P),
                    "BPVDIRA": F.BPVDIRA(pvs),
                    "CHI2DOF": F.CHI2DOF,  # CHI2VXNDOF
                    "BPVFDCHI2": F.BPVFDCHI2(pvs),
                    "BPVFD": F.BPVFD(pvs),
                    "BPVVDRHO": F.BPVVDRHO(pvs),
                    "BPVVDZ": F.BPVVDZ(pvs),
                    "BPVLTIME": F.BPVLTIME(pvs),
                    "END_VX": F.END_VX,  # END_
                    "END_VY": F.END_VY,
                    "END_VZ": F.END_VZ,
                }
            )
        orig_variables += dtf_quality_variables

        dtf_variables = FunctorCollection({dtf_name + expr: DTF(func) for expr, func in orig_variables.get_thor_functors().items()})
        dtf_dict.update({abbr: dtf_variables})

    return dtf_dict
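# Branch-name illustration (the abbreviations come from particle_df in descriptor_writer.py):
# for the B2Jpsippbar configuration, template() calls this twice with a PV constraint, once
# with mass_constraints=["B_s0", "J/psi(1S)"] and once with ["J/psi(1S)"], giving the prefixes
#   "DTF_PV_Bs_Jpsi_"  and  "DTF_PV_Jpsi_"
# so each refitted quantity is written with that prefix, e.g. DTF_PV_Jpsi_MASS and
# DTF_PV_Jpsi_PT for every field.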
import re


def convert_cut(string_cut):
    cuts = string_cut.split("&")
    paras = ["BPV", "PID_", "MASS", "CHARGE", "CHI2DOF", "END", "IS_ID", "ABS_", "MAX", "SUM", "GHOSTPROB", "CHILD"]
    values = []
    for cut in cuts:
        value = cut
        for para in paras:
            if para in cut:
                value = value.replace(para, f"F.{para}")
        value = re.sub(r"\bPT\b", "F.PT", value)
        value = re.sub(r"\bP\b", "F.P", value)
        value = re.sub(r"\bCHI2\b", "F.CHI2", value)
        values.append(value)
    functor_cut = ",".join(values)
    return eval(f"F.require_all({functor_cut})")
    #return f"F.require_all({functor_cut})"