Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (6)
Showing changed files with 707 additions and 53 deletions
......@@ -217,7 +217,7 @@ else:
for entry in args.tags.split('|'):
build, t = entry.split(':')
tags[build] = t.split(',')
dddb_tag, conddb_tag = tags['dd4hep' if UseDD4Hep else 'detdesc']
dddb_tag, conddb_tag = tags['dd4hep' if UseDD4Hep else 'detdesc']
else:
dddb_tag, conddb_tag = args.tags.split(',')
......
......@@ -46,7 +46,8 @@ expectedFiles=["fileCanvas.pdf", "IPforwardCanvas.pdf",
"longForwardCanvas.pdf", "longMatchingCanvas.pdf",
"occupancyCanvas.pdf", "PIDCanvas.pdf",
"PIDkinCanvas.pdf", "PVcanvas.pdf",
"PVcovCanvas.pdf", "veloCanvas.pdf"]
"PVcovCanvas.pdf", "PVdistCanvas.pdf",
"IPresolutionCanvas.pdf", "veloCanvas.pdf"]
for file in expectedFiles:
if not os.path.exists(file):
......
......@@ -322,8 +322,8 @@ fit_v0_secondary_vertices INFO 1D histograms in directory "fit_v0_s
| number_of_svs | "NSVs" | 10000 | 0.854 | 1.8782 | 5.8281 | 76.047 |
gather_selections INFO 1D histograms in directory "gather_selections" : 2
| ID | Title | # | Mean | RMS | Skewness | Kurtosis |
| line_passes | "line passes" | 17371 | 79.602 | 17.669 | -0.79901 | -0.33106 |
| line_rates | "line rates" | 834 | 67.189 | 28.489 | -0.66155 | -1.1836 |
| line_passes | "line passes" | 17371 | 80.809 | 18.607 | -0.75667 | -0.52873 |
| line_rates | "line rates" | 834 | 68.386 | 29.423 | -0.64559 | -1.2252 |
is_muon INFO 1D histograms in directory "is_muon" : 3
| ID | Title | # | Mean | RMS | Skewness | Kurtosis |
| n_muons | "# muons" | 458878 | 0.10947 | 0.31223 | 2.5015 | 4.2576 |
......
This diff is collapsed.
......@@ -322,8 +322,8 @@ fit_v0_secondary_vertices INFO 1D histograms in directory "fit_v0_s
| number_of_svs | "NSVs" | 10000 | 0.8616 | 1.8936 | 5.7952 | 74.373 |
gather_selections INFO 1D histograms in directory "gather_selections" : 2
| ID | Title | # | Mean | RMS | Skewness | Kurtosis |
| line_passes | "line passes" | 17411 | 79.535 | 17.770 | -0.81572 | -0.26522 |
| line_rates | "line rates" | 860 | 67.085 | 28.595 | -0.65966 | -1.1917 |
| line_passes | "line passes" | 17411 | 80.741 | 18.707 | -0.77161 | -0.47002 |
| line_rates | "line rates" | 860 | 68.28 | 29.529 | -0.64379 | -1.2325 |
is_muon INFO 1D histograms in directory "is_muon" : 3
| ID | Title | # | Mean | RMS | Skewness | Kurtosis |
| n_muons | "# muons" | 461023 | 0.10963 | 0.31243 | 2.4989 | 4.2447 |
......
......@@ -325,8 +325,8 @@ fit_v0_secondary_vertices INFO 1D histograms in directory "fit_v0_s
| number_of_svs | "NSVs" | 10000 | 0.864 | 1.8964 | 5.7826 | 74.022 |
gather_selections INFO 1D histograms in directory "gather_selections" : 2
| ID | Title | # | Mean | RMS | Skewness | Kurtosis |
| line_passes | "line passes" | 17434 | 79.473 | 17.856 | -0.82602 | -0.22905 |
| line_rates | "line rates" | 866 | 66.982 | 28.602 | -0.64588 | -1.218 |
| line_passes | "line passes" | 17434 | 80.678 | 18.791 | -0.78078 | -0.43713 |
| line_rates | "line rates" | 866 | 68.173 | 29.538 | -0.63037 | -1.2576 |
is_muon INFO 1D histograms in directory "is_muon" : 3
| ID | Title | # | Mean | RMS | Skewness | Kurtosis |
| n_muons | "# muons" | 461016 | 0.10963 | 0.31243 | 2.499 | 4.2448 |
......
......@@ -325,8 +325,8 @@ fit_v0_secondary_vertices INFO 1D histograms in directory "fit_v0_s
| number_of_svs | "NSVs" | 10000 | 0.8562 | 1.8803 | 5.817 | 75.771 |
gather_selections INFO 1D histograms in directory "gather_selections" : 2
| ID | Title | # | Mean | RMS | Skewness | Kurtosis |
| line_passes | "line passes" | 17401 | 79.505 | 17.804 | -0.81668 | -0.26439 |
| line_rates | "line rates" | 844 | 66.636 | 28.666 | -0.61853 | -1.2546 |
| line_passes | "line passes" | 17401 | 80.71 | 18.741 | -0.77234 | -0.46878 |
| line_rates | "line rates" | 844 | 67.816 | 29.604 | -0.60315 | -1.2932 |
is_muon INFO 1D histograms in directory "is_muon" : 3
| ID | Title | # | Mean | RMS | Skewness | Kurtosis |
| n_muons | "# muons" | 458865 | 0.10948 | 0.31224 | 2.5015 | 4.2573 |
......
......@@ -1283,8 +1283,10 @@ def setup_hlt1_node(enablePhysics=True,
with_ut=with_ut,
with_muon=with_muon,
tracking_type=TrackingType.FORWARD)
node = make_dq_node(reconstructed_objects,
reconstructed_objects_forward, line_algorithms)
node = make_dq_node(
reconstructed_objects,
reconstructed_objects_forward,
prefilters=beam_beam_filter)
return node
if not withMCChecking:
......
......@@ -409,15 +409,14 @@ def make_composite_node_with_gec(alg_name,
def make_dq_node(reconstructed_matching,
reconstructed_forward,
line_algorithms,
methods=["forward", "matching", "occupancy", "pv", "velo"]):
methods=["forward", "matching", "occupancy", "pv", "velo"],
prefilters=[]):
# N.B. if more 'methods' are added to the ODQV later, make sure to update Allen/Dumpers/BinaryDumpers/tests/qmtest/lhcb_ODQV.qmt line 35
nodes = [
data_quality_node(
reconstructed_forward if method == "forward" else
reconstructed_matching, line_algorithms, method)
for method in methods
reconstructed_matching, method, prefilters) for method in methods
]
return CompositeNode(
......@@ -427,15 +426,14 @@ def make_dq_node(reconstructed_matching,
force_order=False)
def data_quality_node(reconstructed_objects=None,
line_algorithms=None,
method=""):
def data_quality_node(reconstructed_objects=None, method="", prefilters=[]):
validators = []
if method in ["forward", "matching"]:
validators = [
CompositeNode(
f"data_quality_validation_{method}", [
f"data_quality_validation_{method}",
prefilters + [
data_quality_validation_long(
reconstructed_objects["long_tracks"],
reconstructed_objects["long_track_particles"],
......@@ -447,7 +445,8 @@ def data_quality_node(reconstructed_objects=None,
elif method == "occupancy":
validators = [
CompositeNode(
f"data_quality_validation_{method}", [
f"data_quality_validation_{method}",
prefilters + [
data_quality_validation_occupancy(
f"data_quality_validation_{method}")
],
......@@ -457,7 +456,8 @@ def data_quality_node(reconstructed_objects=None,
elif method == "pv":
validators = [
CompositeNode(
f"data_quality_validation_{method}", [
f"data_quality_validation_{method}",
prefilters + [
data_quality_validation_pv(
reconstructed_objects["long_tracks"],
f"data_quality_validation_{method}")
......@@ -468,7 +468,8 @@ def data_quality_node(reconstructed_objects=None,
elif method == "velo":
validators = [
CompositeNode(
f"data_quality_validation_{method}", [
f"data_quality_validation_{method}",
prefilters + [
data_quality_validation_velo(
reconstructed_objects["long_tracks"],
f"data_quality_validation_{method}")
......
......@@ -457,6 +457,7 @@ def data_quality_validation_occupancy(name="data_quality_validator"):
number_of_events = initialize_number_of_events()
decoded_scifi = decode_scifi()
scifi_xz_seeds = make_seeding_XZ_tracks(decoded_scifi)
decoded_muon = decode_muon()
decoded_calo = decode_calo()
ecal_clusters = make_ecal_clusters(
......@@ -477,6 +478,7 @@ def data_quality_validation_occupancy(name="data_quality_validator"):
"dev_offsets_estimated_input_size"],
dev_offsets_velo_tracks_t=velo_tracks["dev_offsets_all_velo_tracks"],
dev_scifi_hit_offsets_t=decoded_scifi["dev_scifi_hit_offsets"],
dev_scifi_seedsXZ_t=scifi_xz_seeds['seed_xz_number_of_tracks'],
dev_ecal_clusters_offsets_t=ecal_clusters["dev_ecal_cluster_offsets"])
......
......@@ -23,5 +23,9 @@ hlt1_node = setup_hlt1_node(
with_ut=False,
# tracking_type is set to matching, but will output both matching and forward
tracking_type=TrackingType.MATCHING,
with_lumi=False,
enableBGI=False,
withSMOG2=False,
enablePhysics=True,
with_calo=True)
generate(hlt1_node)
......@@ -104,6 +104,7 @@ namespace data_quality_validator_occupancy {
DEVICE_INPUT(dev_offsets_velo_tracks_t, unsigned) dev_offsets_velo_tracks;
DEVICE_INPUT(dev_scifi_hit_offsets_t, unsigned) dev_scifi_hit_offsets;
DEVICE_INPUT(dev_ecal_clusters_offsets_t, unsigned) dev_ecal_clusters_offsets;
DEVICE_INPUT(dev_scifi_seedsXZ_t, unsigned) dev_scifi_seedsXZ;
};
struct data_quality_validator_occupancy_t : public HostAlgorithm, Parameters {
......
......@@ -100,6 +100,7 @@ void data_quality_validator_pv::data_quality_validator_pv_t::output_monitor(
// --> PVs
auto tree = handler.tree("PVs");
auto eventTree = handler.tree("PV_event");
auto PVpairsTree = handler.tree("PV_pairs");
const auto PVs = make_host_buffer<dev_multi_fit_vertices_t>(arguments, context);
const auto n_pvs = make_host_buffer<dev_number_of_multi_fit_vertices_t>(arguments, context);
const auto event_list = make_host_buffer<dev_event_list_t>(arguments, context);
......@@ -120,7 +121,15 @@ void data_quality_validator_pv::data_quality_validator_pv_t::output_monitor(
handler.branch(tree, "cov22", pv_cov[2][2]);
int nPVs;
float PVdistance_min, PVdistance_max, PVdistance_mean;
handler.branch(eventTree, "n_pvs", nPVs);
handler.branch(eventTree, "PVdistance_min", PVdistance_min);
handler.branch(eventTree, "PVdistance_max", PVdistance_max);
handler.branch(eventTree, "PVdistance_mean", PVdistance_mean);
// Fill with the Delta Z distance of each pair of PVs
float PVdelta_z;
handler.branch(PVpairsTree, "PVdelta_z", PVdelta_z);
for (unsigned i = 0; i < event_list.size(); ++i) {
const auto evnum = event_list[i];
......@@ -131,6 +140,19 @@ void data_quality_validator_pv::data_quality_validator_pv_t::output_monitor(
// something like dev_pv_offsets
const unsigned pv_offset = evnum * PV::max_number_vertices;
// If there are 1 or fewer PVs, there is no distance between them
// - set a dummy number for this
if (nPVs <= 1) {
PVdistance_max = -99.f;
PVdistance_min = -99.f;
PVdistance_mean = -99.f;
}
else { // Otherwise reset the values ready for the upcoming loop
PVdistance_mean = 0.f;
PVdistance_max = 0.f;
PVdistance_min = std::numeric_limits<float>::infinity();
}
for (int i_vertex = 0; i_vertex < nPVs; i_vertex++) {
const auto pv = PVs[i_vertex + pv_offset];
pv_x = pv.position.x;
......@@ -146,6 +168,33 @@ void data_quality_validator_pv::data_quality_validator_pv_t::output_monitor(
pv_cov[2][1] = pv.cov21;
pv_cov[2][2] = pv.cov22;
tree->Fill();
// Now compare this vertex to all previous ones and calculate their distances
if (nPVs > 1) {
for (int j = 0; j < i_vertex; ++j) {
const auto otherVertex = PVs[j + pv_offset];
const float delta_x = otherVertex.position.x - pv_x;
const float delta_y = otherVertex.position.y - pv_y;
const float delta_z = otherVertex.position.z - pv_z;
const float delta_pos = std::sqrt(delta_x * delta_x + delta_y * delta_y + delta_z * delta_z);
if (delta_pos > PVdistance_max) {
PVdistance_max = delta_pos;
}
if (delta_pos < PVdistance_min) {
PVdistance_min = delta_pos;
}
PVdistance_mean += delta_pos;
// Fill the PV_pairs tree once for each pair of PVs
PVdelta_z = delta_z;
PVpairsTree->Fill();
}
}
}
// finally, divide by nPVs to form the mean
if (nPVs > 1) {
PVdistance_mean /= nPVs;
}
eventTree->Fill();
}
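For illustration, a minimal self-contained C++ sketch of the per-event bookkeeping introduced above: the 3D distance between every pair of fitted PVs feeds the min/max/mean branches, and each pair's delta z feeds the PV_pairs tree. The Vertex and summarise_pv_distances names are illustrative only; the mean is normalised by the PV count, mirroring the validator.

// Illustrative sketch, not part of the change above.
#include <algorithm>
#include <cmath>
#include <limits>
#include <vector>

struct Vertex {
  float x, y, z;
};

struct PVDistanceSummary {
  float min = -99.f, max = -99.f, mean = -99.f; // -99 is the dummy for events with fewer than 2 PVs
  std::vector<float> delta_z;                   // one entry per PV pair, as filled into PV_pairs
};

PVDistanceSummary summarise_pv_distances(const std::vector<Vertex>& pvs)
{
  PVDistanceSummary s;
  if (pvs.size() <= 1) return s; // no pairs: keep the dummy values

  s.min = std::numeric_limits<float>::infinity();
  s.max = 0.f;
  float sum = 0.f;
  for (std::size_t i = 0; i < pvs.size(); ++i) {
    for (std::size_t j = 0; j < i; ++j) {
      const float dx = pvs[j].x - pvs[i].x;
      const float dy = pvs[j].y - pvs[i].y;
      const float dz = pvs[j].z - pvs[i].z;
      const float d = std::sqrt(dx * dx + dy * dy + dz * dz);
      s.min = std::min(s.min, d);
      s.max = std::max(s.max, d);
      sum += d;
      s.delta_z.push_back(dz);
    }
  }
  s.mean = sum / pvs.size(); // normalised by the PV count, as in the validator above
  return s;
}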
......@@ -173,14 +222,16 @@ void data_quality_validator_occupancy::data_quality_validator_occupancy_t::outpu
// --> Occupancy
auto eventTree = handler.tree("occupancy");
const auto scifi_tracks_offsets = make_host_buffer<dev_scifi_hit_offsets_t>(arguments, context);
const auto scifi_seeds = make_host_buffer<dev_scifi_seedsXZ_t>(arguments, context);
const auto velo_offsets_eis = make_host_buffer<dev_velo_offsets_estimated_input_size_t>(arguments, context);
const auto event_velo_tracks_offsets = make_host_buffer<dev_offsets_velo_tracks_t>(arguments, context);
const auto ecal_clusters = make_host_buffer<dev_ecal_clusters_offsets_t>(arguments, context);
const auto muon_offsets = make_host_buffer<dev_station_ocurrences_offset_t>(arguments, context);
const auto event_list = make_host_buffer<dev_event_list_t>(arguments, context);
int n_scifi_hits, n_velo_hits, n_velo_tracks, n_ecal_clusters, n_muon_hits;
int n_scifi_hits, n_scifi_xz_seeds, n_velo_hits, n_velo_tracks, n_ecal_clusters, n_muon_hits;
handler.branch(eventTree, "n_scifi_hits", n_scifi_hits);
handler.branch(eventTree, "n_scifi_xz_seeds", n_scifi_xz_seeds);
handler.branch(eventTree, "n_velo_hits", n_velo_hits);
handler.branch(eventTree, "n_velo_tracks", n_velo_tracks);
handler.branch(eventTree, "n_ecal_clusters", n_ecal_clusters);
......@@ -194,6 +245,7 @@ void data_quality_validator_occupancy::data_quality_validator_occupancy_t::outpu
const auto evnum = event_list[i];
SciFi::ConstHitCount scifi_hit_count {scifi_tracks_offsets.data(), evnum};
n_scifi_hits = scifi_hit_count.event_number_of_hits();
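// seed_xz_number_of_tracks holds one entry per event, so it is indexed directly by the event number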
n_scifi_xz_seeds = scifi_seeds.data()[evnum];
const unsigned* module_pair_hit_start = velo_offsets_eis.data() + evnum * Velo::Constants::n_module_pairs;
const unsigned event_hit_start = module_pair_hit_start[0];
......
......@@ -46,6 +46,16 @@ TH1* draw(
const Int_t colourIndex,
std::map<TVirtualPad*, TH1*>& originalHists);
void makeIPplot(
TTree* tree,
TString var,
TVirtualPad* pad,
const TString& fileName,
const bool first,
const Int_t colourIndex,
std::map<TVirtualPad*, TH1*>& originalHists,
const bool forward);
std::pair<TColor*, Int_t> GetColorAndLineStyle(Int_t index);
template<typename... FILENAMES>
......@@ -81,6 +91,7 @@ void DataQualityPlot_Overlay(FILENAMES... files)
std::map<TString, TCanvas*> canvases = {
{"PVcanvas", new TCanvas("PVcanvas", "PVcanvas", 900, 600)},
{"PVcovCanvas", new TCanvas("PVcovCanvas", "PVcovCanvas", 900, 900)},
{"PVdistCanvas", new TCanvas("PVdistCanvas", "PVdistCanvas", 600, 600)},
{"longMatchingCanvas", new TCanvas("longMatchingCanvas", "longMatchingCanvas", 900, 900)},
{"longForwardCanvas", new TCanvas("longForwardCanvas", "longForwardCanvas", 900, 900)},
{"kalmanCovCanvas", new TCanvas("kalmanCovCanvas", "kalmanCovCanvas", 1200, 1200)},
......@@ -90,6 +101,7 @@ void DataQualityPlot_Overlay(FILENAMES... files)
{"PIDkinCanvas", new TCanvas("PIDkinCanvas", "PIDkinCanvas", 1200, 600)},
{"IPmatchingCanvas", new TCanvas("IPmatchingCanvas", "IPmatchingCanvas", 600, 600)},
{"IPforwardCanvas", new TCanvas("IPforwardCanvas", "IPforwardCanvas", 600, 600)},
{"IPresolutionCanvas", new TCanvas("IPresolutionCanvas", "IPresolutionCanvas", 600, 600)},
{"fileCanvas", new TCanvas("fileCanvas", "fileCanvas", 600, 600)}};
for (auto& [name, canvas] : canvases) {
canvas->Divide(canvas->GetWindowWidth() / 300, canvas->GetWindowHeight() / 300);
......@@ -113,11 +125,11 @@ void DataQualityPlot_Overlay(FILENAMES... files)
canvas = canvases["PVcanvas"];
std::vector<Var> PVcanvasVars = {Var("n_pvs", "data_quality_validation_pv/PV_event", 0, 8),
Var("pv_nTracks", "data_quality_validation_pv/PVs", 0, 100),
Var("pv_y:pv_x", "data_quality_validation_pv/PVs", 0.8, 1.4, 0.35, 0.65),
Var("pv_x", "data_quality_validation_pv/PVs", 0.8, 1.4),
Var("pv_y", "data_quality_validation_pv/PVs", 0.35, 0.65),
std::vector<Var> PVcanvasVars = {Var("n_pvs", "data_quality_validation_pv/PV_event", 0, 12),
Var("pv_nTracks", "data_quality_validation_pv/PVs", 0, 20),
Var("pv_y:pv_x", "data_quality_validation_pv/PVs", 0.8, 1.5, -0.1, 0.5),
Var("pv_x", "data_quality_validation_pv/PVs", 0.8, 1.5),
Var("pv_y", "data_quality_validation_pv/PVs", -0.1, 0.5),
Var("pv_z", "data_quality_validation_pv/PVs", -100, 100)};
for (size_t i = 0; i < PVcanvasVars.size(); ++i) {
......@@ -131,8 +143,19 @@ void DataQualityPlot_Overlay(FILENAMES... files)
if (first) {
legends["mu"] = new TLegend(0.6, 0.7, 0.9, 0.9);
}
Float_t mean = hist->GetMean();
legends["mu"]->AddEntry(hist, Form("#mu = %.2f", mean), "l");
hist->ResetStats();
// build a Poisson shape to fit, following
// https://root-forum.cern.ch/t/fitting-a-poisson-distribution-to-a-histogram/12078/2
TF1* f1 = new TF1(
"f1",
"[0]*TMath::Power(([1]/[2]),(x/[2]))*(TMath::Exp(-([1]/[2])))/TMath::Gamma((x/[2])+1.)",
var.xmin,
var.xmax);
f1->SetParameters(1, 1, 1); // you MUST set non-zero initial values for parameters
hist->Fit("f1", "RQ"); // "R" = fit between "xmin" and "xmax" of the "f1"
Float_t mean = f1->GetParameter(1);
legends["mu"]->AddEntry(hist, Form("#mu_{poisson} = %.2f", mean), "l");
delete f1;
if (fileName == fileList.back()) {
legends["mu"]->Draw();
......@@ -161,11 +184,43 @@ void DataQualityPlot_Overlay(FILENAMES... files)
draw(tree, var, pad, fileName, first, colourIndex, originalHists);
}
std::cout << " complete!" << std::endl;
std::cout << "Generating PV distance canvas ...";
canvas = canvases["PVdistCanvas"];
std::vector<Var> PVdistCanvasVars = {Var("PVdistance_min", "data_quality_validation_pv/PV_event", 0, 200),
Var("PVdistance_max", "data_quality_validation_pv/PV_event", 0, 300),
Var("PVdistance_mean", "data_quality_validation_pv/PV_event", 0, 200),
Var("PVdelta_z", "data_quality_validation_pv/PV_pairs", -6, 6)};
for (size_t i = 0; i < PVdistCanvasVars.size(); ++i) {
TVirtualPad* pad = canvas->cd(i + 1);
Var var = PVdistCanvasVars[i];
TTree* tree = dynamic_cast<TTree*>(file->Get(var.treeName));
TH1* hist = draw(tree, var, pad, fileName, first, colourIndex, originalHists);
if (i < 3) {
if (first) {
legends[var.varName] = new TLegend(0.5, 0.6, 0.9, 0.9);
}
hist->ResetStats();
Float_t mean = hist->GetMean();
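// Entries below zero are the -99 sentinels filled for events with fewer than two PVs,
// so this fraction is the rate of events without a PV pair.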
Float_t lessThan2PVs = 1.f * tree->GetEntries(var.varName + "<0") / tree->GetEntries();
legends[var.varName]->AddEntry(hist, Form("#mu = %.2f; <2 PV rate = %.2f%%", mean, lessThan2PVs * 100), "l");
if (fileName == fileList.back()) {
legends[var.varName]->Draw();
}
}
}
std::cout << " complete!" << std::endl;
std::cout << "Generating long tracks canvas (matching) ...";
std::vector<Var> longVars = {
Var("n_long_tracks", "data_quality_validation_matching/long_tracks_event", 0, 30),
Var("n_long_tracks", "data_quality_validation_matching/long_tracks_event", 1, 140),
Var("qop", "data_quality_validation_matching/long_track_particles", -4e-4, 4e-4),
Var("pt", "data_quality_validation_matching/long_track_particles", 0, 5000),
Var("tx", "data_quality_validation_matching/long_track_particles", -0.12, 0.12),
......@@ -185,7 +240,19 @@ void DataQualityPlot_Overlay(FILENAMES... files)
}
TTree* tree = dynamic_cast<TTree*>(file->Get(var.treeName));
draw(tree, var, pad, fileName, first, colourIndex, originalHists);
TH1* hist = draw(tree, var, pad, fileName, first, colourIndex, originalHists);
if (i == 0) {
if (first) {
legends["nLongTracks_Matching"] = new TLegend(0.6, 0.7, 0.9, 0.9);
}
hist->ResetStats();
Float_t mean = hist->GetMean();
legends["nLongTracks_Matching"]->AddEntry(hist, Form("#mu = %.2f", mean), "l");
if (fileName == fileList.back()) {
legends["nLongTracks_Matching"]->Draw();
}
}
}
std::cout << " complete!" << std::endl;
......@@ -203,7 +270,19 @@ void DataQualityPlot_Overlay(FILENAMES... files)
var.forward = true;
TTree* tree = dynamic_cast<TTree*>(file->Get(var.treeName));
draw(tree, var, pad, fileName, first, colourIndex, originalHists);
TH1* hist = draw(tree, var, pad, fileName, first, colourIndex, originalHists);
if (i == 0) {
if (first) {
legends["nLongTracks_Forward"] = new TLegend(0.6, 0.7, 0.9, 0.9);
}
hist->ResetStats();
Float_t mean = hist->GetMean();
legends["nLongTracks_Forward"]->AddEntry(hist, Form("#mu = %.2f", mean), "l");
if (fileName == fileList.back()) {
legends["nLongTracks_Forward"]->Draw();
}
}
}
std::cout << " complete!" << std::endl;
......@@ -231,7 +310,7 @@ void DataQualityPlot_Overlay(FILENAMES... files)
std::cout << "Generating velo canvas ...";
std::vector<Var> veloVars = {Var("n_velo_hits", "data_quality_validation_occupancy/occupancy", 0, 3000),
Var("n_velo_tracks", "data_quality_validation_occupancy/occupancy", -0.5, 249.5),
Var("n_velo_tracks", "data_quality_validation_occupancy/occupancy", -0.5, 1000),
Var("n_hits_per_track", "data_quality_validation_velo/velo_states", 0, 16),
Var("tx", "data_quality_validation_velo/velo_states", -0.3, 0.3),
Var("ty", "data_quality_validation_velo/velo_states", -0.3, 0.3),
......@@ -252,15 +331,16 @@ void DataQualityPlot_Overlay(FILENAMES... files)
TH1* hist = draw(tree, var, pad, fileName, first, colourIndex, originalHists);
if (i == 2) {
if (i == 1 or i == 2) {
if (first) {
legends["nHitsPerTrack"] = new TLegend(0.6, 0.7, 0.9, 0.9);
legends[var.varName] = new TLegend(0.6, 0.7, 0.9, 0.9);
}
hist->ResetStats();
Float_t mean = hist->GetMean();
legends["nHitsPerTrack"]->AddEntry(hist, Form("#mu = %.2f", mean), "l");
legends[var.varName]->AddEntry(hist, Form("#mu = %.2f", mean), "l");
if (fileName == fileList.back()) {
legends["nHitsPerTrack"]->Draw();
legends[var.varName]->Draw();
}
}
}
......@@ -269,11 +349,12 @@ void DataQualityPlot_Overlay(FILENAMES... files)
std::cout << "Generating occupancy canvas ...";
std::vector<Var> occupancyVars = {
Var("n_velo_hits", "data_quality_validation_occupancy/occupancy", 0, 3000),
Var("n_scifi_hits", "data_quality_validation_occupancy/occupancy", 0, 6000),
Var("n_ecal_clusters", "data_quality_validation_occupancy/occupancy", 0, 60),
Var("n_muon_hits", "data_quality_validation_occupancy/occupancy", 0, 400),
Var("n_scifi_hits:n_velo_hits", "data_quality_validation_occupancy/occupancy", 0, 3000, 0, 6000),
Var("n_velo_hits", "data_quality_validation_occupancy/occupancy", 1, 30000),
Var("n_scifi_hits", "data_quality_validation_occupancy/occupancy", 1, 60000),
Var("n_scifi_xz_seeds", "data_quality_validation_occupancy/occupancy", 1, 800),
Var("n_ecal_clusters", "data_quality_validation_occupancy/occupancy", 1, 600),
Var("n_muon_hits", "data_quality_validation_occupancy/occupancy", 1, 4000),
Var("n_scifi_hits:n_velo_hits", "data_quality_validation_occupancy/occupancy", 1, 3000, 1, 6000),
};
canvas = canvases["occupancyCanvas"];
......@@ -283,8 +364,23 @@ void DataQualityPlot_Overlay(FILENAMES... files)
Var var = occupancyVars[i];
TTree* tree = dynamic_cast<TTree*>(file->Get(var.treeName));
var.logy = true;
draw(tree, var, pad, fileName, first, colourIndex, originalHists);
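// index 5 is the 2D n_scifi_hits vs n_velo_hits plot, which is kept linear in y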
if (i != 5) {
var.logy = true;
}
TH1* hist = draw(tree, var, pad, fileName, first, colourIndex, originalHists);
if (i < 4) {
if (first) {
legends[var.varName] = new TLegend(0.6, 0.7, 0.9, 0.9);
}
hist->ResetStats();
Float_t mean = hist->GetMean();
legends[var.varName]->AddEntry(hist, Form("#mu = %.2f", mean), "l");
if (fileName == fileList.back()) {
legends[var.varName]->Draw();
}
}
}
std::cout << " complete!" << std::endl;
......@@ -339,7 +435,6 @@ void DataQualityPlot_Overlay(FILENAMES... files)
for (size_t i = 0; i < PIDkinVars.size(); ++i) {
TVirtualPad* pad = canvas->cd(i + 1);
Var var = PIDkinVars[i];
TTree* tree = dynamic_cast<TTree*>(file->Get(var.treeName));
TLegend* leg = nullptr;
leg = new TLegend();
if (i < 4) {
......@@ -357,6 +452,7 @@ void DataQualityPlot_Overlay(FILENAMES... files)
else {
var.cut = "is_electron==1";
}
TTree* tree = dynamic_cast<TTree*>(file->Get(var.treeName));
draw(tree, var, pad, fileName, first, colourIndex, originalHists);
leg->SetTextColor(2);
if (fileName == fileList.front()) {
......@@ -367,8 +463,8 @@ void DataQualityPlot_Overlay(FILENAMES... files)
std::cout << " complete!" << std::endl;
std::cout << "Generating IP canvas (matching)...";
std::vector<Var> IPVars = {Var("ip_x", "data_quality_validation_matching/long_track_particles", -4, 4),
Var("ip_y", "data_quality_validation_matching/long_track_particles", -4, 4),
std::vector<Var> IPVars = {Var("ip_x", "data_quality_validation_matching/long_track_particles", -0.5, 0.5),
Var("ip_y", "data_quality_validation_matching/long_track_particles", -0.5, 0.5),
Var("ip_chi2", "data_quality_validation_matching/long_track_particles", -1, 1000),
Var("chi2", "data_quality_validation_matching/long_track_particles", 0, 100)};
IPVars[2].logy = true;
......@@ -380,7 +476,19 @@ void DataQualityPlot_Overlay(FILENAMES... files)
Var var = IPVars[i];
TTree* tree = dynamic_cast<TTree*>(file->Get(var.treeName));
draw(tree, var, pad, fileName, first, colourIndex, originalHists);
TH1* hist = draw(tree, var, pad, fileName, first, colourIndex, originalHists);
if (i < 2) {
if (first) {
legends[var.varName + "_matching"] = new TLegend(0.6, 0.7, 0.9, 0.9);
}
hist->ResetStats();
Float_t width = hist->GetRMS();
legends[var.varName + "_matching"]->AddEntry(hist, Form("#sigma = %.4f", width), "l");
if (fileName == fileList.back()) {
legends[var.varName + "_matching"]->Draw();
}
}
}
std::cout << " complete!" << std::endl;
......@@ -391,10 +499,56 @@ void DataQualityPlot_Overlay(FILENAMES... files)
for (size_t i = 0; i < IPVars.size(); ++i) {
TVirtualPad* pad = canvas->cd(i + 1);
Var var = IPVars[i];
var.treeName.ReplaceAll("matching", "forward");
TTree* tree = dynamic_cast<TTree*>(file->Get(var.treeName));
var.forward = true;
draw(tree, var, pad, fileName, first, colourIndex, originalHists);
TH1* hist = draw(tree, var, pad, fileName, first, colourIndex, originalHists);
if (i < 2) {
if (first) {
legends[var.varName + "_forward"] = new TLegend(0.6, 0.7, 0.9, 0.9);
}
hist->ResetStats();
Float_t width = hist->GetRMS();
legends[var.varName + "_forward"]->AddEntry(hist, Form("#sigma = %.4f", width), "l");
if (fileName == fileList.back()) {
legends[var.varName + "_forward"]->Draw();
}
}
}
std::cout << " complete!" << std::endl;
std::cout << "Generating IP resolution canvas ...";
std::vector<TString> IPresoVars = {"ip_x", "ip_x", "ip_y", "ip_y"};
std::vector<TString> Trees = {"data_quality_validation_matching/long_track_particles",
"data_quality_validation_forward/long_track_particles",
"data_quality_validation_matching/long_track_particles",
"data_quality_validation_forward/long_track_particles"};
canvas = canvases["IPresolutionCanvas"];
for (size_t i = 0; i < IPresoVars.size(); ++i) {
TVirtualPad* pad = canvas->cd(i + 1);
TString var = IPresoVars[i];
TTree* tree = dynamic_cast<TTree*>(file->Get(Trees[i]));
TLegend* leg = new TLegend();
const bool forward = Trees[i].Contains("forward");
if (forward) {
leg->SetHeader("Forward", "C");
}
else {
leg->SetHeader("Matching", "C");
}
makeIPplot(tree, var, pad, fileName, first, colourIndex, originalHists, forward);
leg->SetTextColor(2);
if (fileName == fileList.back()) {
leg->Draw();
}
}
std::cout << " complete!" << std::endl;
......@@ -492,7 +646,7 @@ TH1* draw(
Int_t nBins = 100;
if (var.varName.BeginsWith("n_") or var.varName == "nPVs") {
nBins = var.xmax - var.xmin;
if (nBins > 100) {
if (nBins > 200) {
nBins = 100;
}
}
......@@ -542,6 +696,67 @@ TH1* draw(
return hist;
}
/*-------------------------------------------------------------------------*/
void makeIPplot(
TTree* tree,
TString var,
TVirtualPad* pad,
const TString& fileName,
const bool first,
const Int_t colourIndex,
std::map<TVirtualPad*, TH1*>& originalHists,
const bool forward)
{
// These bin edges match the TDR Fig.30
std::vector<Float_t> pTbinEdges = {0.0f, 0.4f, 0.6f, 0.8f, 1.0f, 1.2f, 1.4f, 1.6f, 1.8f, 2.0f};
if (forward) {
pTbinEdges = {0.0f, 0.4f, 0.6f, 0.8f, 1.0f, 1.25f};
}
TString name =
Form("%s_canvas_%s_%s_%s", var.Data(), tree->GetName(), fileName.Data(), forward ? "forward" : "matching");
name.ReplaceAll("/", "_");
name.ReplaceAll(".root", "");
TH1F* IPplot = new TH1F(
name.Data(),
Form(";1/p_{T} [c/GeV];IP_{%s} resolution [#mum]", var == "ip_x" ? "x" : "y"),
pTbinEdges.size() - 1,
pTbinEdges.data());
for (size_t i_ptBin = 0; i_ptBin < pTbinEdges.size() - 1; ++i_ptBin) {
// find the IP resolution in this 1/pT bin and fill the IPplot hist
TH1F IPcalculationHist("IPcalculationHist", "", 60, -0.1, 0.1);
TCut cut = Form("1000/pt > %f && 1000/pt <= %f", pTbinEdges[i_ptBin], pTbinEdges[i_ptBin + 1]);
// cut.Print();
tree->Draw(Form("%s >> IPcalculationHist", var.Data()), cut, "GOFF");
if (IPcalculationHist.Integral(1, 60) > 0) {
TFitResultPtr fit = IPcalculationHist.Fit("gaus", "SQ", "", -0.1, 0.1);
IPplot->SetBinContent(i_ptBin + 1, fit->Value(2) * 1000);
IPplot->SetBinError(i_ptBin + 1, fit->Error(2) * 1000);
}
else {
IPplot->SetBinContent(i_ptBin + 1, 0.f);
IPplot->SetBinError(i_ptBin + 1, 0.f);
}
}
if (not first) {
const Double_t yMax = IPplot->GetBinContent(IPplot->GetMaximumBin());
const Double_t factor = 1.1;
if (yMax * factor > originalHists[pad]->GetMaximum()) {
originalHists[pad]->SetMaximum(yMax * factor);
}
}
auto [colour, style] = GetColorAndLineStyle(colourIndex);
IPplot->SetLineColor(colour->GetNumber());
IPplot->SetLineStyle(style);
if (first) {
originalHists[pad] = IPplot;
}
TString drawOption = Form("%s", first ? "E1" : "SAME");
IPplot->Draw(drawOption.Data());
IPplot->SetMarkerStyle(0);
return;
}
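A note on the conversions in makeIPplot, assuming the ntuples store the impact parameter in mm and p_T in MeV (the usual Allen conventions):

    \sigma_{IP}\,[\mu\mathrm{m}] = 1000 \times \sigma_{\mathrm{Gauss}}\,[\mathrm{mm}], \qquad 1/p_T\,[c/\mathrm{GeV}] = 1000 / p_T\,[\mathrm{MeV}],

which matches the 1/p_{T} [c/GeV] and IP resolution [#mum] axis labels set on IPplot.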
/*-------------------------------------------------------------------------*/
std::pair<TColor*, Int_t> GetColorAndLineStyle(Int_t index)
{
static map<std::array<Int_t, 3>, TColor*> cache;
......@@ -579,7 +794,7 @@ std::pair<TColor*, Int_t> GetColorAndLineStyle(Int_t index)
void DataQualityPlot_Overlay()
{
TString isQMtest = std::getenv("QMTTEST_NAME");
if (isQMtest == "lhcb_ODQV_plot") {
if (isQMtest.Contains("lhcb_ODQV_plot")) {
DataQualityPlot_Overlay("allen_odqv_qmtest.root");
}
else {
......