diff --git a/jsonpog-integration/.gitlab-ci.yml b/jsonpog-integration/.gitlab-ci.yml new file mode 100644 index 0000000000000000000000000000000000000000..e4bce2f119652c2b13f9002a06c09dc8d0256f5e --- /dev/null +++ b/jsonpog-integration/.gitlab-ci.yml @@ -0,0 +1,97 @@ +default: + image: python:3.9 + +variables: + PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip" + CHANGE_REPORT: change_report.md + SCHEMA_REPORT: schema_report.md + SUMMARY_REPORT: summary.md + +workflow: + rules: + # avoid duplicate pipelines when pushing to an open MR + - if: $CI_PIPELINE_SOURCE == "merge_request_event" + - if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS + when: never + - if: $CI_COMMIT_BRANCH + + +setup-env: + stage: build + before_script: + - python -m venv venv + - source venv/bin/activate + script: + - python -m pip install correctionlib rich pandas "pydantic<2" + cache: + paths: &venv_cache + - .cache/pip + - venv/ + policy: push + + +test-mr: + stage: test + cache: + paths: *venv_cache + policy: pull + before_script: + - source venv/bin/activate + script: + - git clone https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.cern.ch/cms-nanoAOD/jsonpog-integration.git -b master cms-nanoAOD-repo --depth 1 + - script/testMR.sh . cms-nanoAOD-repo + artifacts: + # using variable here doesn't work (gitlab bug) + paths: [change_report.md, schema_report.md, summary.md] + expose_as: "validation report" + when: always + only: + - merge_requests + +comment-mr: + stage: test + needs: [test-mr] + when: always + script: + - script/gitlab_post_comment.py $SUMMARY_REPORT + only: + - merge_requests + +test: + stage: test + cache: + paths: *venv_cache + policy: pull + before_script: + - source venv/bin/activate + script: + - script/validateAll.sh . + except: + - merge_requests + +generate-summary: + stage: deploy + cache: + paths: *venv_cache + policy: pull + before_script: + - source venv/bin/activate + script: + - script/generate_html.py -i ./POG/ -o ./public/commonJSONSFs/ + only: + - master@cms-nanoAOD/jsonpog-integration + artifacts: + paths: + - public + +# Deploy the pages generated to DFS (from https://gitlab.cern.ch/gitlabci-examples/deploy_dfs/) +# DFS website location+credentials are in CI/CD variables +dfsdeploy: + stage: deploy + needs: [generate-summary] + image: gitlab-registry.cern.ch/ci-tools/ci-web-deployer:latest + only: + - master@cms-nanoAOD/jsonpog-integration + script: + - deploy-dfs + diff --git a/jsonpog-integration/POG/BTV/2016postVFP_UL/btagging.json.gz b/jsonpog-integration/POG/BTV/2016postVFP_UL/btagging.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..a88d6eda19dd7b6d2dbb3970ae87d2c9d4a03efe Binary files /dev/null and b/jsonpog-integration/POG/BTV/2016postVFP_UL/btagging.json.gz differ diff --git a/jsonpog-integration/POG/BTV/2016postVFP_UL/ctagging.json.gz b/jsonpog-integration/POG/BTV/2016postVFP_UL/ctagging.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..34174dc2889e8a092eefd3e940f2c1a90f550e5a Binary files /dev/null and b/jsonpog-integration/POG/BTV/2016postVFP_UL/ctagging.json.gz differ diff --git a/jsonpog-integration/POG/BTV/2016postVFP_UL/subjet_btagging.json.gz b/jsonpog-integration/POG/BTV/2016postVFP_UL/subjet_btagging.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..8eb52731eeb8bbde545152b652de5228050a249d Binary files /dev/null and b/jsonpog-integration/POG/BTV/2016postVFP_UL/subjet_btagging.json.gz differ diff --git a/jsonpog-integration/POG/BTV/2016preVFP_UL/btagging.json.gz 
b/jsonpog-integration/POG/BTV/2016preVFP_UL/btagging.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..f2770eca4bf57ab613d344d556cfc0090d4691b0 Binary files /dev/null and b/jsonpog-integration/POG/BTV/2016preVFP_UL/btagging.json.gz differ diff --git a/jsonpog-integration/POG/BTV/2016preVFP_UL/ctagging.json.gz b/jsonpog-integration/POG/BTV/2016preVFP_UL/ctagging.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..2c4d3a70b5c10a49b492394266bfed1c9b352f96 Binary files /dev/null and b/jsonpog-integration/POG/BTV/2016preVFP_UL/ctagging.json.gz differ diff --git a/jsonpog-integration/POG/BTV/2016preVFP_UL/subjet_btagging.json.gz b/jsonpog-integration/POG/BTV/2016preVFP_UL/subjet_btagging.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..e7ec330b86449a4b8d6a03e12069a2c5d5b8e82d Binary files /dev/null and b/jsonpog-integration/POG/BTV/2016preVFP_UL/subjet_btagging.json.gz differ diff --git a/jsonpog-integration/POG/BTV/2017_UL/btagging.json.gz b/jsonpog-integration/POG/BTV/2017_UL/btagging.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..b6b5241ffef619ae4a3c4d5ac9df64db80dbb7f5 Binary files /dev/null and b/jsonpog-integration/POG/BTV/2017_UL/btagging.json.gz differ diff --git a/jsonpog-integration/POG/BTV/2017_UL/ctagging.json.gz b/jsonpog-integration/POG/BTV/2017_UL/ctagging.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..c5211e2d253190656c8e4916531a63436a4641b1 Binary files /dev/null and b/jsonpog-integration/POG/BTV/2017_UL/ctagging.json.gz differ diff --git a/jsonpog-integration/POG/BTV/2017_UL/subjet_btagging.json.gz b/jsonpog-integration/POG/BTV/2017_UL/subjet_btagging.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..81948c42ef1fc2129bcb9772dd26341105f6dc29 Binary files /dev/null and b/jsonpog-integration/POG/BTV/2017_UL/subjet_btagging.json.gz differ diff --git a/jsonpog-integration/POG/BTV/2018_UL/btagging.json.gz b/jsonpog-integration/POG/BTV/2018_UL/btagging.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..38c9b734f3a5aa99d66d930bc6fd64590ebe840b Binary files /dev/null and b/jsonpog-integration/POG/BTV/2018_UL/btagging.json.gz differ diff --git a/jsonpog-integration/POG/BTV/2018_UL/ctagging.json.gz b/jsonpog-integration/POG/BTV/2018_UL/ctagging.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..e6ca17ec647ed3871800c15ea7d5e09d7ad449f4 Binary files /dev/null and b/jsonpog-integration/POG/BTV/2018_UL/ctagging.json.gz differ diff --git a/jsonpog-integration/POG/BTV/2018_UL/subjet_btagging.json.gz b/jsonpog-integration/POG/BTV/2018_UL/subjet_btagging.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..8173131d5351a1d0289f09deab354a034141a492 Binary files /dev/null and b/jsonpog-integration/POG/BTV/2018_UL/subjet_btagging.json.gz differ diff --git a/jsonpog-integration/POG/BTV/2022_27Jun2023/btagging.json.gz b/jsonpog-integration/POG/BTV/2022_27Jun2023/btagging.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..24085e79508d96a9c7e18bd4b6875227b4ca5485 Binary files /dev/null and b/jsonpog-integration/POG/BTV/2022_27Jun2023/btagging.json.gz differ diff --git a/jsonpog-integration/POG/BTV/2022_27Jun2023/ctagging.json.gz b/jsonpog-integration/POG/BTV/2022_27Jun2023/ctagging.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..743c6fa3c23591584e3d269f3ea840b569493191 Binary files /dev/null and 
b/jsonpog-integration/POG/BTV/2022_27Jun2023/ctagging.json.gz differ diff --git a/jsonpog-integration/POG/BTV/2022_27Jun2023EE/btagging.json.gz b/jsonpog-integration/POG/BTV/2022_27Jun2023EE/btagging.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..d54cfaeadc7957c71754618314a53a4f61d65f4d Binary files /dev/null and b/jsonpog-integration/POG/BTV/2022_27Jun2023EE/btagging.json.gz differ diff --git a/jsonpog-integration/POG/BTV/2022_27Jun2023EE/ctagging.json.gz b/jsonpog-integration/POG/BTV/2022_27Jun2023EE/ctagging.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..93aad50cfdc8c00821227dee0e41ae9d277a7444 Binary files /dev/null and b/jsonpog-integration/POG/BTV/2022_27Jun2023EE/ctagging.json.gz differ diff --git a/jsonpog-integration/POG/BTV/2022_Summer22/btagging.json.gz b/jsonpog-integration/POG/BTV/2022_Summer22/btagging.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..736045bec980701099054cec54a35604af20e27a Binary files /dev/null and b/jsonpog-integration/POG/BTV/2022_Summer22/btagging.json.gz differ diff --git a/jsonpog-integration/POG/BTV/2022_Summer22/ctagging.json.gz b/jsonpog-integration/POG/BTV/2022_Summer22/ctagging.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..615a741ee7f476c47f31e410ce8176962e0d244a Binary files /dev/null and b/jsonpog-integration/POG/BTV/2022_Summer22/ctagging.json.gz differ diff --git a/jsonpog-integration/POG/BTV/2022_Summer22EE/btagging.json.gz b/jsonpog-integration/POG/BTV/2022_Summer22EE/btagging.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..f7467d8aaa2c75317e0d1acf3db641db56a44cd3 Binary files /dev/null and b/jsonpog-integration/POG/BTV/2022_Summer22EE/btagging.json.gz differ diff --git a/jsonpog-integration/POG/BTV/2022_Summer22EE/ctagging.json.gz b/jsonpog-integration/POG/BTV/2022_Summer22EE/ctagging.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..1b2d58ef01847bc703b3b7faece3bfda7d927379 Binary files /dev/null and b/jsonpog-integration/POG/BTV/2022_Summer22EE/ctagging.json.gz differ diff --git a/jsonpog-integration/POG/BTV/2023_Summer23/btagging.json.gz b/jsonpog-integration/POG/BTV/2023_Summer23/btagging.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..e8ed48f8627a43c3d3812ab6aff3ee67e8eabd0b Binary files /dev/null and b/jsonpog-integration/POG/BTV/2023_Summer23/btagging.json.gz differ diff --git a/jsonpog-integration/POG/BTV/2023_Summer23BPix/btagging.json.gz b/jsonpog-integration/POG/BTV/2023_Summer23BPix/btagging.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..7bb939ce420cda482eb0e0eef16e248cce25450e Binary files /dev/null and b/jsonpog-integration/POG/BTV/2023_Summer23BPix/btagging.json.gz differ diff --git a/jsonpog-integration/POG/EGM/2016postVFP_UL/electron.json.gz b/jsonpog-integration/POG/EGM/2016postVFP_UL/electron.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..9b45bd5156c059a9f6c092186fc4238bab360efa Binary files /dev/null and b/jsonpog-integration/POG/EGM/2016postVFP_UL/electron.json.gz differ diff --git a/jsonpog-integration/POG/EGM/2016postVFP_UL/photon.json.gz b/jsonpog-integration/POG/EGM/2016postVFP_UL/photon.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..c4869de23b8dfe52a8698c34c68836c1fafcdc7f Binary files /dev/null and b/jsonpog-integration/POG/EGM/2016postVFP_UL/photon.json.gz differ diff --git 
a/jsonpog-integration/POG/EGM/2016preVFP_UL/electron.json.gz b/jsonpog-integration/POG/EGM/2016preVFP_UL/electron.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..8ca51b26d5db0af49fb5d8c7d7e7a2fdd8549ad8 Binary files /dev/null and b/jsonpog-integration/POG/EGM/2016preVFP_UL/electron.json.gz differ diff --git a/jsonpog-integration/POG/EGM/2016preVFP_UL/photon.json.gz b/jsonpog-integration/POG/EGM/2016preVFP_UL/photon.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..3c617bbd1a5579c3e7bacf3edfd8384ae99ea876 Binary files /dev/null and b/jsonpog-integration/POG/EGM/2016preVFP_UL/photon.json.gz differ diff --git a/jsonpog-integration/POG/EGM/2017_UL/electron.json.gz b/jsonpog-integration/POG/EGM/2017_UL/electron.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..12d229e1092eb6ff8c751be170f36353e79dad2c Binary files /dev/null and b/jsonpog-integration/POG/EGM/2017_UL/electron.json.gz differ diff --git a/jsonpog-integration/POG/EGM/2017_UL/photon.json.gz b/jsonpog-integration/POG/EGM/2017_UL/photon.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..2e8ae2aedff26b9b5e5e84cca56f9459661bbe0a Binary files /dev/null and b/jsonpog-integration/POG/EGM/2017_UL/photon.json.gz differ diff --git a/jsonpog-integration/POG/EGM/2018_UL/electron.json.gz b/jsonpog-integration/POG/EGM/2018_UL/electron.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..7a05ff50a94fd1dae8897734782784669a67854a Binary files /dev/null and b/jsonpog-integration/POG/EGM/2018_UL/electron.json.gz differ diff --git a/jsonpog-integration/POG/EGM/2018_UL/photon.json.gz b/jsonpog-integration/POG/EGM/2018_UL/photon.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..d12e6fc615f196852221e26cd5f52b1213d2ebc3 Binary files /dev/null and b/jsonpog-integration/POG/EGM/2018_UL/photon.json.gz differ diff --git a/jsonpog-integration/POG/EGM/2022_Summer22/electron.json.gz b/jsonpog-integration/POG/EGM/2022_Summer22/electron.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..8ccee93bedf64bd00bdc5fb32f89240580d61dba Binary files /dev/null and b/jsonpog-integration/POG/EGM/2022_Summer22/electron.json.gz differ diff --git a/jsonpog-integration/POG/EGM/2022_Summer22/electronHlt.json.gz b/jsonpog-integration/POG/EGM/2022_Summer22/electronHlt.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..bf01495308ad49920916a1fbffdcbaf8beebd918 Binary files /dev/null and b/jsonpog-integration/POG/EGM/2022_Summer22/electronHlt.json.gz differ diff --git a/jsonpog-integration/POG/EGM/2022_Summer22/electronSS.json.gz b/jsonpog-integration/POG/EGM/2022_Summer22/electronSS.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..278c95808a9b655d3249e15566f8e40bd39296ec Binary files /dev/null and b/jsonpog-integration/POG/EGM/2022_Summer22/electronSS.json.gz differ diff --git a/jsonpog-integration/POG/EGM/2022_Summer22/photon.json.gz b/jsonpog-integration/POG/EGM/2022_Summer22/photon.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..faad5c388dbc3f550469e8a52e2538f3199333bc Binary files /dev/null and b/jsonpog-integration/POG/EGM/2022_Summer22/photon.json.gz differ diff --git a/jsonpog-integration/POG/EGM/2022_Summer22/photonSS.json.gz b/jsonpog-integration/POG/EGM/2022_Summer22/photonSS.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..c055b34b749b04760f91be3c67b9857727e71d17 Binary files /dev/null 
and b/jsonpog-integration/POG/EGM/2022_Summer22/photonSS.json.gz differ diff --git a/jsonpog-integration/POG/EGM/2022_Summer22EE/electron.json.gz b/jsonpog-integration/POG/EGM/2022_Summer22EE/electron.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..15b23143cf9a088ea1ec826f3bfe17cd70d82924 Binary files /dev/null and b/jsonpog-integration/POG/EGM/2022_Summer22EE/electron.json.gz differ diff --git a/jsonpog-integration/POG/EGM/2022_Summer22EE/electronHlt.json.gz b/jsonpog-integration/POG/EGM/2022_Summer22EE/electronHlt.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..d3ccb500e5b440e0aede3c7906da830d1661216c Binary files /dev/null and b/jsonpog-integration/POG/EGM/2022_Summer22EE/electronHlt.json.gz differ diff --git a/jsonpog-integration/POG/EGM/2022_Summer22EE/electronSS.json.gz b/jsonpog-integration/POG/EGM/2022_Summer22EE/electronSS.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..06e10f8760de4de5552173f971929e72296d73d5 Binary files /dev/null and b/jsonpog-integration/POG/EGM/2022_Summer22EE/electronSS.json.gz differ diff --git a/jsonpog-integration/POG/EGM/2022_Summer22EE/photon.json.gz b/jsonpog-integration/POG/EGM/2022_Summer22EE/photon.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..ece28f9342e388fb432774fbbe59e654edb4bda2 Binary files /dev/null and b/jsonpog-integration/POG/EGM/2022_Summer22EE/photon.json.gz differ diff --git a/jsonpog-integration/POG/EGM/2022_Summer22EE/photonSS.json.gz b/jsonpog-integration/POG/EGM/2022_Summer22EE/photonSS.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..8170b75c8aeba68886206114504aed51fb7a0f70 Binary files /dev/null and b/jsonpog-integration/POG/EGM/2022_Summer22EE/photonSS.json.gz differ diff --git a/jsonpog-integration/POG/EGM/2023_Summer23/electron.json.gz b/jsonpog-integration/POG/EGM/2023_Summer23/electron.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..381e95c45f3495ced3ca8490434e3d201f10f358 Binary files /dev/null and b/jsonpog-integration/POG/EGM/2023_Summer23/electron.json.gz differ diff --git a/jsonpog-integration/POG/EGM/2023_Summer23/electronHlt.json.gz b/jsonpog-integration/POG/EGM/2023_Summer23/electronHlt.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..2c46c820c31a5018e72500a3ef819fdcd43ddf09 Binary files /dev/null and b/jsonpog-integration/POG/EGM/2023_Summer23/electronHlt.json.gz differ diff --git a/jsonpog-integration/POG/EGM/2023_Summer23/photon.json.gz b/jsonpog-integration/POG/EGM/2023_Summer23/photon.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..0dd95e31c46f80100c8f8b67ebfa96771bb6aff0 Binary files /dev/null and b/jsonpog-integration/POG/EGM/2023_Summer23/photon.json.gz differ diff --git a/jsonpog-integration/POG/EGM/2023_Summer23BPix/electron.json.gz b/jsonpog-integration/POG/EGM/2023_Summer23BPix/electron.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..5aa13380d33758eefd206f7030058fd0d94f1df1 Binary files /dev/null and b/jsonpog-integration/POG/EGM/2023_Summer23BPix/electron.json.gz differ diff --git a/jsonpog-integration/POG/EGM/2023_Summer23BPix/electronHlt.json.gz b/jsonpog-integration/POG/EGM/2023_Summer23BPix/electronHlt.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..9d41154890a91a7d53ebc9483e1817fbd16cac4c Binary files /dev/null and b/jsonpog-integration/POG/EGM/2023_Summer23BPix/electronHlt.json.gz differ diff --git 
a/jsonpog-integration/POG/EGM/2023_Summer23BPix/photon.json.gz b/jsonpog-integration/POG/EGM/2023_Summer23BPix/photon.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..6c9a97dcdf1cb4d14b0904cb3d6c3c74c1461681 Binary files /dev/null and b/jsonpog-integration/POG/EGM/2023_Summer23BPix/photon.json.gz differ diff --git a/jsonpog-integration/POG/JME/2016_EOY/2016_jmar.json.gz b/jsonpog-integration/POG/JME/2016_EOY/2016_jmar.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..46a3ee127ad6b6f9f892df3d88479ccb9a7620a5 Binary files /dev/null and b/jsonpog-integration/POG/JME/2016_EOY/2016_jmar.json.gz differ diff --git a/jsonpog-integration/POG/JME/2016postVFP_UL/fatJet_jerc.json.gz b/jsonpog-integration/POG/JME/2016postVFP_UL/fatJet_jerc.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..9f48d28bd75c094ff6c42ab33174fd5442f0b60f Binary files /dev/null and b/jsonpog-integration/POG/JME/2016postVFP_UL/fatJet_jerc.json.gz differ diff --git a/jsonpog-integration/POG/JME/2016postVFP_UL/jet_jerc.json.gz b/jsonpog-integration/POG/JME/2016postVFP_UL/jet_jerc.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..c529ecb615e863b25bb5732b125045c1ff77420a Binary files /dev/null and b/jsonpog-integration/POG/JME/2016postVFP_UL/jet_jerc.json.gz differ diff --git a/jsonpog-integration/POG/JME/2016postVFP_UL/jetvetomaps.json.gz b/jsonpog-integration/POG/JME/2016postVFP_UL/jetvetomaps.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..41f891fef8c1f1a51ab034da46f791dc814bb1dc Binary files /dev/null and b/jsonpog-integration/POG/JME/2016postVFP_UL/jetvetomaps.json.gz differ diff --git a/jsonpog-integration/POG/JME/2016postVFP_UL/jmar.json.gz b/jsonpog-integration/POG/JME/2016postVFP_UL/jmar.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..8d1ec769cbec5ee0497d75ebc39932dc78b2c2e2 Binary files /dev/null and b/jsonpog-integration/POG/JME/2016postVFP_UL/jmar.json.gz differ diff --git a/jsonpog-integration/POG/JME/2016postVFP_UL/met.json.gz b/jsonpog-integration/POG/JME/2016postVFP_UL/met.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..eef646a9214551f767a0c5e78266355bcd8582eb Binary files /dev/null and b/jsonpog-integration/POG/JME/2016postVFP_UL/met.json.gz differ diff --git a/jsonpog-integration/POG/JME/2016preVFP_UL/fatJet_jerc.json.gz b/jsonpog-integration/POG/JME/2016preVFP_UL/fatJet_jerc.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..31a7916512d152e37a9b43018a2694d9661c2379 Binary files /dev/null and b/jsonpog-integration/POG/JME/2016preVFP_UL/fatJet_jerc.json.gz differ diff --git a/jsonpog-integration/POG/JME/2016preVFP_UL/jet_jerc.json.gz b/jsonpog-integration/POG/JME/2016preVFP_UL/jet_jerc.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..693cb994e2f1b1ac78a13fe6664580d9beaaae21 Binary files /dev/null and b/jsonpog-integration/POG/JME/2016preVFP_UL/jet_jerc.json.gz differ diff --git a/jsonpog-integration/POG/JME/2016preVFP_UL/jetvetomaps.json.gz b/jsonpog-integration/POG/JME/2016preVFP_UL/jetvetomaps.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..41f891fef8c1f1a51ab034da46f791dc814bb1dc Binary files /dev/null and b/jsonpog-integration/POG/JME/2016preVFP_UL/jetvetomaps.json.gz differ diff --git a/jsonpog-integration/POG/JME/2016preVFP_UL/jmar.json.gz b/jsonpog-integration/POG/JME/2016preVFP_UL/jmar.json.gz new file mode 100644 index 
0000000000000000000000000000000000000000..5fc350a0233d714acb07e446b8a16384f8614411 Binary files /dev/null and b/jsonpog-integration/POG/JME/2016preVFP_UL/jmar.json.gz differ diff --git a/jsonpog-integration/POG/JME/2016preVFP_UL/met.json.gz b/jsonpog-integration/POG/JME/2016preVFP_UL/met.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..91f2bafeed57ab193c3c9d9bd5c4eea73e36313f Binary files /dev/null and b/jsonpog-integration/POG/JME/2016preVFP_UL/met.json.gz differ diff --git a/jsonpog-integration/POG/JME/2017_EOY/2017_jmar.json.gz b/jsonpog-integration/POG/JME/2017_EOY/2017_jmar.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..3756fc1f3d076fd6c2677f6aaaca135fdbd9c791 Binary files /dev/null and b/jsonpog-integration/POG/JME/2017_EOY/2017_jmar.json.gz differ diff --git a/jsonpog-integration/POG/JME/2017_UL/fatJet_jerc.json.gz b/jsonpog-integration/POG/JME/2017_UL/fatJet_jerc.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..6591fa69f06e328fbfb24678db13d5ddb84532d5 Binary files /dev/null and b/jsonpog-integration/POG/JME/2017_UL/fatJet_jerc.json.gz differ diff --git a/jsonpog-integration/POG/JME/2017_UL/jet_jerc.json.gz b/jsonpog-integration/POG/JME/2017_UL/jet_jerc.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..312e48209c50a8839b19a84a8cc774e8ccd2430d Binary files /dev/null and b/jsonpog-integration/POG/JME/2017_UL/jet_jerc.json.gz differ diff --git a/jsonpog-integration/POG/JME/2017_UL/jetvetomaps.json.gz b/jsonpog-integration/POG/JME/2017_UL/jetvetomaps.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..12a3c1c67319480dc75fe36bd0de5efe0fb60f31 Binary files /dev/null and b/jsonpog-integration/POG/JME/2017_UL/jetvetomaps.json.gz differ diff --git a/jsonpog-integration/POG/JME/2017_UL/jmar.json.gz b/jsonpog-integration/POG/JME/2017_UL/jmar.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..138f32044535da33bb766cc54671926540f6ec11 Binary files /dev/null and b/jsonpog-integration/POG/JME/2017_UL/jmar.json.gz differ diff --git a/jsonpog-integration/POG/JME/2017_UL/met.json.gz b/jsonpog-integration/POG/JME/2017_UL/met.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..423e19f487f6962cc9d553a4637342d133d97a63 Binary files /dev/null and b/jsonpog-integration/POG/JME/2017_UL/met.json.gz differ diff --git a/jsonpog-integration/POG/JME/2018_EOY/2018_jmar.json.gz b/jsonpog-integration/POG/JME/2018_EOY/2018_jmar.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..ebe639f6fd27b00d7e34942f1e1b13e8b800e912 Binary files /dev/null and b/jsonpog-integration/POG/JME/2018_EOY/2018_jmar.json.gz differ diff --git a/jsonpog-integration/POG/JME/2018_UL/fatJet_jerc.json.gz b/jsonpog-integration/POG/JME/2018_UL/fatJet_jerc.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..825207feff5e490af7fadcacbd539f4c3b2f8240 Binary files /dev/null and b/jsonpog-integration/POG/JME/2018_UL/fatJet_jerc.json.gz differ diff --git a/jsonpog-integration/POG/JME/2018_UL/jet_jerc.json.gz b/jsonpog-integration/POG/JME/2018_UL/jet_jerc.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..d2a581db8dba7e0881c345868c5d0d5a6551d70b Binary files /dev/null and b/jsonpog-integration/POG/JME/2018_UL/jet_jerc.json.gz differ diff --git a/jsonpog-integration/POG/JME/2018_UL/jetvetomaps.json.gz b/jsonpog-integration/POG/JME/2018_UL/jetvetomaps.json.gz new file mode 100644 index 
0000000000000000000000000000000000000000..e3ef747d788b32b1de57c5f504158648c76fda26 Binary files /dev/null and b/jsonpog-integration/POG/JME/2018_UL/jetvetomaps.json.gz differ diff --git a/jsonpog-integration/POG/JME/2018_UL/jmar.json.gz b/jsonpog-integration/POG/JME/2018_UL/jmar.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..5ddcc31dae509931c57a25896a72dac8217f20ee Binary files /dev/null and b/jsonpog-integration/POG/JME/2018_UL/jmar.json.gz differ diff --git a/jsonpog-integration/POG/JME/2018_UL/met.json.gz b/jsonpog-integration/POG/JME/2018_UL/met.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..071bdfbbb0caab72ef850b2000c47706298fa7fe Binary files /dev/null and b/jsonpog-integration/POG/JME/2018_UL/met.json.gz differ diff --git a/jsonpog-integration/POG/JME/2022_Prompt/fatJet_jerc.json.gz b/jsonpog-integration/POG/JME/2022_Prompt/fatJet_jerc.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..e94670066e8b481c64637088a8b818227f7dd786 Binary files /dev/null and b/jsonpog-integration/POG/JME/2022_Prompt/fatJet_jerc.json.gz differ diff --git a/jsonpog-integration/POG/JME/2022_Prompt/jet_jerc.json.gz b/jsonpog-integration/POG/JME/2022_Prompt/jet_jerc.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..3646a960e46bd7c603dbb0a59051e5a10bfb55ec Binary files /dev/null and b/jsonpog-integration/POG/JME/2022_Prompt/jet_jerc.json.gz differ diff --git a/jsonpog-integration/POG/JME/2022_Prompt/jetvetomaps.json.gz b/jsonpog-integration/POG/JME/2022_Prompt/jetvetomaps.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..02fd206a134f5bfb15b438701dc43062f77832e9 Binary files /dev/null and b/jsonpog-integration/POG/JME/2022_Prompt/jetvetomaps.json.gz differ diff --git a/jsonpog-integration/POG/JME/2022_Summer22/fatJet_jerc.json.gz b/jsonpog-integration/POG/JME/2022_Summer22/fatJet_jerc.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..d6029abff6cf8415c0989ab3bafc4aad48c574d6 Binary files /dev/null and b/jsonpog-integration/POG/JME/2022_Summer22/fatJet_jerc.json.gz differ diff --git a/jsonpog-integration/POG/JME/2022_Summer22/jet_jerc.json.gz b/jsonpog-integration/POG/JME/2022_Summer22/jet_jerc.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..0d9e519e98baf7d023779d54ebf7a6f097a0686f Binary files /dev/null and b/jsonpog-integration/POG/JME/2022_Summer22/jet_jerc.json.gz differ diff --git a/jsonpog-integration/POG/JME/2022_Summer22/jetvetomaps.json.gz b/jsonpog-integration/POG/JME/2022_Summer22/jetvetomaps.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..848b5a493a546d09e6a6b853216f1cbb448dd2fe Binary files /dev/null and b/jsonpog-integration/POG/JME/2022_Summer22/jetvetomaps.json.gz differ diff --git a/jsonpog-integration/POG/JME/2022_Summer22EE/fatJet_jerc.json.gz b/jsonpog-integration/POG/JME/2022_Summer22EE/fatJet_jerc.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..e0d4cc2602b46560ae262203068c382125b9d595 Binary files /dev/null and b/jsonpog-integration/POG/JME/2022_Summer22EE/fatJet_jerc.json.gz differ diff --git a/jsonpog-integration/POG/JME/2022_Summer22EE/jet_jerc.json.gz b/jsonpog-integration/POG/JME/2022_Summer22EE/jet_jerc.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..ec3a233fb9a4fb0b22b19b834a21479cc0027beb Binary files /dev/null and b/jsonpog-integration/POG/JME/2022_Summer22EE/jet_jerc.json.gz differ diff --git 
a/jsonpog-integration/POG/JME/2022_Summer22EE/jetvetomaps.json.gz b/jsonpog-integration/POG/JME/2022_Summer22EE/jetvetomaps.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..9eb1f81fc77abf71c38b7b37538efe28bc1e931d Binary files /dev/null and b/jsonpog-integration/POG/JME/2022_Summer22EE/jetvetomaps.json.gz differ diff --git a/jsonpog-integration/POG/JME/2023_Summer23/fatJet_jerc.json.gz b/jsonpog-integration/POG/JME/2023_Summer23/fatJet_jerc.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..a643d1f6eeb3f04f7d237ee04de97c87d7ad147d Binary files /dev/null and b/jsonpog-integration/POG/JME/2023_Summer23/fatJet_jerc.json.gz differ diff --git a/jsonpog-integration/POG/JME/2023_Summer23/jet_jerc.json.gz b/jsonpog-integration/POG/JME/2023_Summer23/jet_jerc.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..a79f37d2143af1627ad4120e775a77fd5fa3ba61 Binary files /dev/null and b/jsonpog-integration/POG/JME/2023_Summer23/jet_jerc.json.gz differ diff --git a/jsonpog-integration/POG/JME/2023_Summer23/jetvetomaps.json.gz b/jsonpog-integration/POG/JME/2023_Summer23/jetvetomaps.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..fb5ea961a3b66081144b4f674b3182326ee6a5d1 Binary files /dev/null and b/jsonpog-integration/POG/JME/2023_Summer23/jetvetomaps.json.gz differ diff --git a/jsonpog-integration/POG/JME/2023_Summer23BPix/fatJet_jerc.json.gz b/jsonpog-integration/POG/JME/2023_Summer23BPix/fatJet_jerc.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..014113346ecf6e02c09169166ae7374e69d1e4e9 Binary files /dev/null and b/jsonpog-integration/POG/JME/2023_Summer23BPix/fatJet_jerc.json.gz differ diff --git a/jsonpog-integration/POG/JME/2023_Summer23BPix/jet_jerc.json.gz b/jsonpog-integration/POG/JME/2023_Summer23BPix/jet_jerc.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..23efcdd1226866c56b5ac59978437f00a1b5ecaa Binary files /dev/null and b/jsonpog-integration/POG/JME/2023_Summer23BPix/jet_jerc.json.gz differ diff --git a/jsonpog-integration/POG/JME/2023_Summer23BPix/jetvetomaps.json.gz b/jsonpog-integration/POG/JME/2023_Summer23BPix/jetvetomaps.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..dcd2363e16053fd917d21b27b091bf52ac794da1 Binary files /dev/null and b/jsonpog-integration/POG/JME/2023_Summer23BPix/jetvetomaps.json.gz differ diff --git a/jsonpog-integration/POG/JME/2024_Winter24/jet_jerc.json.gz b/jsonpog-integration/POG/JME/2024_Winter24/jet_jerc.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..5dbd6959c738b3880ae15d3fc564a56367631e1e Binary files /dev/null and b/jsonpog-integration/POG/JME/2024_Winter24/jet_jerc.json.gz differ diff --git a/jsonpog-integration/POG/JME/README.md b/jsonpog-integration/POG/JME/README.md new file mode 100644 index 0000000000000000000000000000000000000000..de58254808a27f8a1b17c04884d87cf928916a13 --- /dev/null +++ b/jsonpog-integration/POG/JME/README.md @@ -0,0 +1,116 @@ +# JetMET POG-recommended corrections + +This repository contains the scale factors (SFs) for heavy object tagging, PUJetID and Quark-Gluon tagging and jet energy corrections and resolutions recommended by the JetMET POG. 
+More detailed recommendations can be found on this TWiki page: https://twiki.cern.ch/twiki/bin/viewauth/CMS/JetMET#Quick_links_to_current_recommend + +The exact mapping for **JEC and JER** versions is available from https://twiki.cern.ch/twiki/bin/viewauth/CMS/JECDataMC and https://twiki.cern.ch/twiki/bin/view/CMS/JetResolution#JER_Scaling_factors_and_Uncertai +For the JERC part we currently provide: +- single jet energy correction levels (L1Fastjet, L2Relative, L3Absolute, L2L3Residual) +- a convenience "compound" correction level available as L1L2L3Res (combining all levels listed above) +- all uncertainty sources as detailed in https://twiki.cern.ch/twiki/bin/view/CMS/JECUncertaintySources (as of now the full set, not the reduced set) +- jet resolution scale factors + systematics ("nom", "up", "down") (as of now only for AK4) +- jet pt resolution parametrisations (as of now only for AK4) + +The .json files are split into YEAR_jmar.json for tagging SFs and \[jet/fatJet\]_jerc.json.gz for jet energy corrections/resolutions. +- Run2: jet\~"AK4PFchs"; fatJet\~"AK8PFPuppi" +- Run3: jet\~"AK4PFPuppi"; fatJet\~"AK8PFPuppi" + +The SFs are meant for the following campaigns: + +| Year folder | MC campaign | Data campaign | +|:------------:|:------------------------:| :----------------------:| +| `2016_EOY` | `RunIISummer16MiniAODv3` | `17Jul2018` | +| `2017_EOY` | `RunIIFall17MiniAODv2` | `31Mar2018` | +| `2018_EOY` | `RunIIAutumn18MiniAOD` | `17Sep2018`/`22Jan2019` | +| `2016preVFP_UL`| `RunIISummer20UL16MiniAODAPVv2` |`21Feb2020`| +| `2016postVFP_UL`| `RunIISummer20UL16MiniAODv2` |`21Feb2020`| +| `2017_UL`| `RunIISummer20UL17MiniAODv2` |`09Aug2019`| +| `2018_UL`| `RunIISummer20UL18MiniAODv2` |`12Nov2019`| +| `2022_Prompt` | Winter22 | Prompt RunCDE | +| `2022_Summer22` | Summer22 | `22Sep2023` (ReReco CD) | +| `2022_Summer22EE` | Summer22EE | `22Sep2023` (ReReco E + Prompt RunFG, with EE leak region vetoed) | +| `2023_Summer23` | Summer23 | Prompt23 RunC (divided into Cv123 and Cv4) | +| `2023_Summer23BPix` | Summer23BPix | Prompt23 RunD | +| `2024_Winter24` | Summer23BPix (sic!) | Prompt24 RunBCD, RunE | + + +## Usage + +Please install the [`correctionlib`](https://github.com/cms-nanoAOD/correctionlib) tool to read these SFs. 
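Before turning to the `jmar` tagging SFs below, here is a minimal sketch of reading the JERC payloads described above with `correctionlib`. The campaign tag `Summer19UL17_V5_MC`, the `AK4PFchs` algorithm label and the exact input order are assumptions for illustration only; check the real key names and inputs with `correction summary jet_jerc.json.gz`.
```
import correctionlib

# Hypothetical file and key names -- verify them with `correction summary` before use.
cset = correctionlib.CorrectionSet.from_file("POG/JME/2017_UL/jet_jerc.json.gz")

# Single correction level (assumed inputs: jet eta, jet pt).
l2rel = cset["Summer19UL17_V5_MC_L2Relative_AK4PFchs"]
print(l2rel.evaluate(1.3, 50.0))

# Compound L1L2L3Res level (the L1 part typically also needs the jet area and rho).
compound = cset.compound["Summer19UL17_V5_MC_L1L2L3Res_AK4PFchs"]
print(compound.evaluate(0.5, 1.3, 50.0, 20.0))

# An uncertainty source returns a fractional shift, applied as 1 +/- delta.
delta = cset["Summer19UL17_V5_MC_Total_AK4PFchs"].evaluate(1.3, 50.0)
jec_up, jec_down = 1.0 + delta, 1.0 - delta
```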
+Find out the content of the `jmar.json` using +``` +gunzip POG/JME/2017_EOY/2017_jmar.json.gz +correction summary POG/JME/2017_EOY/2017_jmar.json +``` +Example: + +📈 DeepAK8_W_Nominal (v1) +│ Scale factor for DeepAK8 algorithm (nominal and mass decorrelated) for particle W +│ Node counts: Category: 4, Binning: 24 +│ ╭──────────────────────────── ▶ input ─────────────────────────────╮ +│ │ eta (real) │ +│ │ eta of the jet │ +│ │ Range: [-2.4, 2.4) │ +│ ╰──────────────────────────────────────────────────────────────────╯ +│ ╭──────────────────────────── ▶ input ─────────────────────────────╮ +│ │ pt (real) │ +│ │ pT of the jet │ +│ │ Range: [200.0, 800.0), overflow ok │ +│ ╰──────────────────────────────────────────────────────────────────╯ +│ ╭──────────────────────────── ▶ input ─────────────────────────────╮ +│ │ systematic (string) │ +│ │ systematics: nom, up, down │ +│ │ Values: down, nom, up │ +│ ╰──────────────────────────────────────────────────────────────────╯ +│ ╭──────────────────────────── ▶ input ─────────────────────────────╮ +│ │ workingpoint (string) │ +│ │ Working point of the tagger you use (QCD misidentification rate) │ +│ │ Values: 0p5, 1p0, 2p5, 5p0 │ +│ ╰──────────────────────────────────────────────────────────────────╯ +│ ╭─── ◀ output ───╮ +│ │ weight (real) │ +│ │ No description │ +│ ╰────────────────╯ + +Examples of how to evaluate are given in [`examples/jmarExample.py`](../../examples/jmarExample.py) and [`examples/jercExample.py`](../../examples/jercExample.py). +You can load the set of corrections in Python as follows +``` +from correctionlib import _core + +evaluator = _core.CorrectionSet.from_file('2017_jmar.json') + +valsf = evaluator["DeepAK8_Top_Nominal"].evaluate(eta, pt, syst, wp) +``` + +Here `syst` is `'nom'`, `'up'` or `'down'`. +All available maps and the corresponding input parameters can be inspected with the `correction summary` command mentioned before. + +## MET Phi Corrections +The UL Run II MET Phi Corrections from https://lathomas.web.cern.ch/lathomas/METStuff/XYCorrections/XYMETCorrection_withUL17andUL18andUL16.h can now be used with correctionlib. This implementation was validated against the CMSSW implementation and an independent implementation of these corrections. + +The corrections depend on the pt and phi of the phi-uncorrected MET, on the number of reconstructed primary vertices, and, for data, also on the run number. To keep the call signature uniform, the call for simulation also expects a run number, but it is not used in any way. The inputs can either all be provided as single numbers or all as arrays of the same length. For technical reasons, the data type of all inputs is currently 'float'. The evaluate methods always return the corrected quantity, i.e. the corrected pt(s) or phi(s). + +One can load the correction and get the corrected quantities e.g. 
via +``` +# met_pt: float value or array of phi-uncorrected pt(s) of MET +# met_phi: float value or array of phi-uncorrected phi(s) of MET +# npvs: float value or array of number of reconstructed vertices +# run: float value or array of run numbers (is needed for data and simulation, but will be ignored for simulation) +ceval = correctionlib.CorrectionSet.from_file("2018_UL/met.json.gz") +# simulation +# phi-corrected pts +corrected_pts = ceval["pt_metphicorr_pfmet_mc"].evaluate(met_pt,met_phi,npvs,run) +# phi-corrected phis +corrected_phis = ceval["phi_metphicorr_pfmet_mc"].evaluate(met_pt,met_phi,npvs,run) +# data +# phi-corrected pts +corrected_pts = ceval["pt_metphicorr_pfmet_data"].evaluate(met_pt,met_phi,npvs,run) +# phi-corrected phis +corrected_phis = ceval["phi_metphicorr_pfmet_data"].evaluate(met_pt,met_phi,npvs,run) +``` + +An example script loading and applying the corrections can be found in `examples/metPhiCorrectionExample.py`. The inputs in this example are randomly drawn numbers so the 'corrected' distributions should not be taken too seriously. +## References + +The JMAR POG JSON files are created from https://github.com/cms-jet/JSON_Format +The JERC POG JSON files are created from https://github.com/cms-jet/JECDatabase/tree/master/scripts/JERC2JSON diff --git a/jsonpog-integration/POG/JME/jer_smear.json.gz b/jsonpog-integration/POG/JME/jer_smear.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..bddeffa1a2317ff21eee46b8a1c304238b64fd2c Binary files /dev/null and b/jsonpog-integration/POG/JME/jer_smear.json.gz differ diff --git a/jsonpog-integration/POG/LUM/2016postVFP_UL/puWeights.json.gz b/jsonpog-integration/POG/LUM/2016postVFP_UL/puWeights.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..849cf198b9bfea6ea1a80926166ac9e9772fd417 Binary files /dev/null and b/jsonpog-integration/POG/LUM/2016postVFP_UL/puWeights.json.gz differ diff --git a/jsonpog-integration/POG/LUM/2016preVFP_UL/puWeights.json.gz b/jsonpog-integration/POG/LUM/2016preVFP_UL/puWeights.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..6aaf92265facbba5471855a88dcfd34476bfaab1 Binary files /dev/null and b/jsonpog-integration/POG/LUM/2016preVFP_UL/puWeights.json.gz differ diff --git a/jsonpog-integration/POG/LUM/2017_UL/puWeights.json.gz b/jsonpog-integration/POG/LUM/2017_UL/puWeights.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..4fe0e5f00a4bc843d2be80e357556aa2004ca812 Binary files /dev/null and b/jsonpog-integration/POG/LUM/2017_UL/puWeights.json.gz differ diff --git a/jsonpog-integration/POG/LUM/2018_UL/puWeights.json.gz b/jsonpog-integration/POG/LUM/2018_UL/puWeights.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..ee3f19cf70797513c8b38f5babc4b0666955cc66 Binary files /dev/null and b/jsonpog-integration/POG/LUM/2018_UL/puWeights.json.gz differ diff --git a/jsonpog-integration/POG/LUM/2022_Summer22/puWeights.json.gz b/jsonpog-integration/POG/LUM/2022_Summer22/puWeights.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..4fca11d6c3c572cb02d4d6ab58a3066309970132 Binary files /dev/null and b/jsonpog-integration/POG/LUM/2022_Summer22/puWeights.json.gz differ diff --git a/jsonpog-integration/POG/LUM/2022_Summer22EE/puWeights.json.gz b/jsonpog-integration/POG/LUM/2022_Summer22EE/puWeights.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..d6b574982658fad5cf835b42b6e9ab4dd2a34b1f Binary files /dev/null and 
b/jsonpog-integration/POG/LUM/2022_Summer22EE/puWeights.json.gz differ diff --git a/jsonpog-integration/POG/LUM/2023_Summer23/puWeights.json.gz b/jsonpog-integration/POG/LUM/2023_Summer23/puWeights.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..c01822738aba98357edf303e6de6c5a3272e4f85 Binary files /dev/null and b/jsonpog-integration/POG/LUM/2023_Summer23/puWeights.json.gz differ diff --git a/jsonpog-integration/POG/LUM/2023_Summer23BPix/puWeights.json.gz b/jsonpog-integration/POG/LUM/2023_Summer23BPix/puWeights.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..64509fa5fa2a3ccb2ba45e26efddbcc544547d8e Binary files /dev/null and b/jsonpog-integration/POG/LUM/2023_Summer23BPix/puWeights.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2016postVFP_UL/muon_HighPt.json.gz b/jsonpog-integration/POG/MUO/2016postVFP_UL/muon_HighPt.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..999c4fc74f37ba5e6ec7745ddf43aa4ae637fcf5 Binary files /dev/null and b/jsonpog-integration/POG/MUO/2016postVFP_UL/muon_HighPt.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2016postVFP_UL/muon_JPsi.json.gz b/jsonpog-integration/POG/MUO/2016postVFP_UL/muon_JPsi.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..acd505657b459a511212a46c755e33dd06aa9ef0 Binary files /dev/null and b/jsonpog-integration/POG/MUO/2016postVFP_UL/muon_JPsi.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2016postVFP_UL/muon_Z.json.gz b/jsonpog-integration/POG/MUO/2016postVFP_UL/muon_Z.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..bb4aa622702f568c4f6e107bd78e8f24468f06aa Binary files /dev/null and b/jsonpog-integration/POG/MUO/2016postVFP_UL/muon_Z.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2016preVFP_UL/muon_HighPt.json.gz b/jsonpog-integration/POG/MUO/2016preVFP_UL/muon_HighPt.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..1fa4d1d47d5f3d3ad2439e65a6b76a3cdd60c9b7 Binary files /dev/null and b/jsonpog-integration/POG/MUO/2016preVFP_UL/muon_HighPt.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2016preVFP_UL/muon_JPsi.json.gz b/jsonpog-integration/POG/MUO/2016preVFP_UL/muon_JPsi.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..88037c82cb74ec0482c0161de9f16c5a5fd68959 Binary files /dev/null and b/jsonpog-integration/POG/MUO/2016preVFP_UL/muon_JPsi.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2016preVFP_UL/muon_Z.json.gz b/jsonpog-integration/POG/MUO/2016preVFP_UL/muon_Z.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..137500984c1f291cd3f2a76998aff28882a192d9 Binary files /dev/null and b/jsonpog-integration/POG/MUO/2016preVFP_UL/muon_Z.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2017_UL/muon_HighPt.json.gz b/jsonpog-integration/POG/MUO/2017_UL/muon_HighPt.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..09671835feb4b44a37e81a35df15502d94f0c8a6 Binary files /dev/null and b/jsonpog-integration/POG/MUO/2017_UL/muon_HighPt.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2017_UL/muon_JPsi.json.gz b/jsonpog-integration/POG/MUO/2017_UL/muon_JPsi.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..c586e96951f30e01c419a4555a102b600f5077a9 Binary files /dev/null and b/jsonpog-integration/POG/MUO/2017_UL/muon_JPsi.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2017_UL/muon_Z.json.gz 
b/jsonpog-integration/POG/MUO/2017_UL/muon_Z.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..936cb95d61870f49beb4143c5f30d048c5e91391 Binary files /dev/null and b/jsonpog-integration/POG/MUO/2017_UL/muon_Z.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2018_UL/muon_HighPt.json.gz b/jsonpog-integration/POG/MUO/2018_UL/muon_HighPt.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..0bcded7a03bfe058cd1c65b6420ac8ac307c7b75 Binary files /dev/null and b/jsonpog-integration/POG/MUO/2018_UL/muon_HighPt.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2018_UL/muon_JPsi.json.gz b/jsonpog-integration/POG/MUO/2018_UL/muon_JPsi.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..cb611012257eef3b6a699b5d7a805dd97fa22748 Binary files /dev/null and b/jsonpog-integration/POG/MUO/2018_UL/muon_JPsi.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2018_UL/muon_Z.json.gz b/jsonpog-integration/POG/MUO/2018_UL/muon_Z.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..d8cb20f136e0c8d36bf4a0c9e43332607d4fe39d Binary files /dev/null and b/jsonpog-integration/POG/MUO/2018_UL/muon_Z.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2022_Summer22/muon_HighPt.json.gz b/jsonpog-integration/POG/MUO/2022_Summer22/muon_HighPt.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..27b2d9e3e7e067b5557b30791ecc585829db77fa Binary files /dev/null and b/jsonpog-integration/POG/MUO/2022_Summer22/muon_HighPt.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2022_Summer22/muon_JPsi.json.gz b/jsonpog-integration/POG/MUO/2022_Summer22/muon_JPsi.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..b06ec376f963e6b8c83aa3ea602f7cc2e0d14a53 Binary files /dev/null and b/jsonpog-integration/POG/MUO/2022_Summer22/muon_JPsi.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2022_Summer22/muon_Z.json.gz b/jsonpog-integration/POG/MUO/2022_Summer22/muon_Z.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..55c5310487fa30db5464dd0dea2f130b6b074a05 Binary files /dev/null and b/jsonpog-integration/POG/MUO/2022_Summer22/muon_Z.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2022_Summer22EE/muon_HighPt.json.gz b/jsonpog-integration/POG/MUO/2022_Summer22EE/muon_HighPt.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..b808e749b9cda67db7b01dc0c5038394c54267f6 Binary files /dev/null and b/jsonpog-integration/POG/MUO/2022_Summer22EE/muon_HighPt.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2022_Summer22EE/muon_JPsi.json.gz b/jsonpog-integration/POG/MUO/2022_Summer22EE/muon_JPsi.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..d5057b4e583a7a80b268b73bb1ca703a2260e810 Binary files /dev/null and b/jsonpog-integration/POG/MUO/2022_Summer22EE/muon_JPsi.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2022_Summer22EE/muon_Z.json.gz b/jsonpog-integration/POG/MUO/2022_Summer22EE/muon_Z.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..09f79551672579226d32162c908c11418618c412 Binary files /dev/null and b/jsonpog-integration/POG/MUO/2022_Summer22EE/muon_Z.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2023_Summer23/muon_HighPt.json.gz b/jsonpog-integration/POG/MUO/2023_Summer23/muon_HighPt.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..9dcdfd0f150bb9001248c3ba21a6cb01cd230d91 Binary files /dev/null and 
b/jsonpog-integration/POG/MUO/2023_Summer23/muon_HighPt.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2023_Summer23/muon_JPsi.json.gz b/jsonpog-integration/POG/MUO/2023_Summer23/muon_JPsi.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..e34eefa1edda37c0cd486695f8af2cd5936d3b5c Binary files /dev/null and b/jsonpog-integration/POG/MUO/2023_Summer23/muon_JPsi.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2023_Summer23/muon_Z.json.gz b/jsonpog-integration/POG/MUO/2023_Summer23/muon_Z.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..2de4292f15b2b866b80331178bfb36622f7aa1e9 Binary files /dev/null and b/jsonpog-integration/POG/MUO/2023_Summer23/muon_Z.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2023_Summer23BPix/muon_HighPt.json.gz b/jsonpog-integration/POG/MUO/2023_Summer23BPix/muon_HighPt.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..14992e0c1ed802d56aed733813f173612e2b1904 Binary files /dev/null and b/jsonpog-integration/POG/MUO/2023_Summer23BPix/muon_HighPt.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2023_Summer23BPix/muon_JPsi.json.gz b/jsonpog-integration/POG/MUO/2023_Summer23BPix/muon_JPsi.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..bdeffdfe05ecbecb8abfe351b58c00ad75d92a37 Binary files /dev/null and b/jsonpog-integration/POG/MUO/2023_Summer23BPix/muon_JPsi.json.gz differ diff --git a/jsonpog-integration/POG/MUO/2023_Summer23BPix/muon_Z.json.gz b/jsonpog-integration/POG/MUO/2023_Summer23BPix/muon_Z.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..fc9a04c0fb960c86cfdd6159a0b7a09fbd38efe5 Binary files /dev/null and b/jsonpog-integration/POG/MUO/2023_Summer23BPix/muon_Z.json.gz differ diff --git a/jsonpog-integration/POG/MUO/README.md b/jsonpog-integration/POG/MUO/README.md new file mode 100644 index 0000000000000000000000000000000000000000..fc7865c7b0a5c4b9e7e18211649dcdb00e36becd --- /dev/null +++ b/jsonpog-integration/POG/MUO/README.md @@ -0,0 +1,13 @@ +## Instructions on how to use the MuonPOG corrections + +Under each data taking campaign, there are three correction files: `muon_JPsi.json`, `muon_Z.json` and `muon_HighPt.json`, +depending on the method used to derive them and the pT range that they cover, as explained in the [MuonPOG Twiki](https://twiki.cern.ch/twiki/bin/view/CMS/MuonRun32022) for each year. +See the table below for more details on those files. + +| Correction file | Method used to derive SFs | pT range | Twiki link | +|:---------------:|:-------------------------:|:-----------------:|:----------:| +| `muon_JPsi` | TnP on J/Psi peak | pT < 30 GeV | [low-pt](https://twiki.cern.ch/twiki/bin/view/CMS/MuonRun32022#Low_pT_below_30_GeV) | +| `muon_Z` | TnP on Z peak | 15 < pT < 200 GeV | [medium-pt](https://twiki.cern.ch/twiki/bin/view/CMS/MuonRun32022#Medium_pT_15_GeV_to_200_GeV) | +| `muon_HighPt` | CutnCount on high-mass DY | pT > 200 GeV | [high-pt](https://twiki.cern.ch/twiki/bin/view/CMS/MuonRun32022#High_pT_above_200_GeV) | + +**Important Note:** Since Run 3 2023, SFs at the Z peak are computed as a function of eta, with more granularity, instead of the usual abs(eta). For all the previous years (Run 2 UL + 2022), and for all the pT regimes, even though SFs are computed as a function of abs(eta), it is possible to read them using eta as input instead, for consistency with 2023. Please refer to `muonExample.py` for more details on the usage. 
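In the same spirit as `muonExample.py`, the sketch below shows how such a file can be read with `correctionlib`. The correction name `NUM_TightID_DEN_TrackerMuons`, the input order and the systematic labels are assumptions for illustration; list the ones actually provided with `correction summary muon_Z.json`.
```
import correctionlib

cset = correctionlib.CorrectionSet.from_file("POG/MUO/2023_Summer23/muon_Z.json.gz")

# Hypothetical correction name and systematic labels -- check `correction summary` for the real ones.
corr = cset["NUM_TightID_DEN_TrackerMuons"]

# From 2023 on, the Z-peak SFs are binned in signed eta; earlier campaigns also accept eta (see the note above).
eta, pt = -1.2, 45.0
sf_nom  = corr.evaluate(eta, pt, "nominal")
sf_up   = corr.evaluate(eta, pt, "systup")
sf_down = corr.evaluate(eta, pt, "systdown")
```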
\ No newline at end of file diff --git a/jsonpog-integration/POG/TAU/2016_Legacy/tau.json.gz b/jsonpog-integration/POG/TAU/2016_Legacy/tau.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..d04972904f6c1e3b49134d9b6e54f314ec3df313 Binary files /dev/null and b/jsonpog-integration/POG/TAU/2016_Legacy/tau.json.gz differ diff --git a/jsonpog-integration/POG/TAU/2016postVFP_UL/tau.json.gz b/jsonpog-integration/POG/TAU/2016postVFP_UL/tau.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..d1b92233f84eb25917374668bfb6d81cb0b3f418 Binary files /dev/null and b/jsonpog-integration/POG/TAU/2016postVFP_UL/tau.json.gz differ diff --git a/jsonpog-integration/POG/TAU/2016preVFP_UL/tau.json.gz b/jsonpog-integration/POG/TAU/2016preVFP_UL/tau.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..3198f4bd9c98b999f665a0c3198b1798e3dc1eab Binary files /dev/null and b/jsonpog-integration/POG/TAU/2016preVFP_UL/tau.json.gz differ diff --git a/jsonpog-integration/POG/TAU/2017_ReReco/tau.json.gz b/jsonpog-integration/POG/TAU/2017_ReReco/tau.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..1c3d8511f2b7a7ed1f5785003fd6a603067ab675 Binary files /dev/null and b/jsonpog-integration/POG/TAU/2017_ReReco/tau.json.gz differ diff --git a/jsonpog-integration/POG/TAU/2017_UL/tau.json.gz b/jsonpog-integration/POG/TAU/2017_UL/tau.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..04cefeb213c0c728dc180ee56dd69fb3d99dff53 Binary files /dev/null and b/jsonpog-integration/POG/TAU/2017_UL/tau.json.gz differ diff --git a/jsonpog-integration/POG/TAU/2018_ReReco/tau.json.gz b/jsonpog-integration/POG/TAU/2018_ReReco/tau.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..4bbbea82466f0d64d309818e54b56516692f87b3 Binary files /dev/null and b/jsonpog-integration/POG/TAU/2018_ReReco/tau.json.gz differ diff --git a/jsonpog-integration/POG/TAU/2018_UL/tau.json.gz b/jsonpog-integration/POG/TAU/2018_UL/tau.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..dfce4da577fdff88c08a12ca3d139ebe77f11ece Binary files /dev/null and b/jsonpog-integration/POG/TAU/2018_UL/tau.json.gz differ diff --git a/jsonpog-integration/POG/TAU/2018_UL/tau_embed.json.gz b/jsonpog-integration/POG/TAU/2018_UL/tau_embed.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..47331953930724c0557440450fe7e744f5961597 Binary files /dev/null and b/jsonpog-integration/POG/TAU/2018_UL/tau_embed.json.gz differ diff --git a/jsonpog-integration/POG/TAU/README.md b/jsonpog-integration/POG/TAU/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b33034510287b828f708edebcc6841e88de1803b --- /dev/null +++ b/jsonpog-integration/POG/TAU/README.md @@ -0,0 +1,91 @@ +# TauPOG-recommended tau corrections + +This repository contains the scale factors (SFs) and energy scales recommended by the TauPOG. +More detailed recommendations can be found on this TWiki page: https://twiki.cern.ch/twiki/bin/viewauth/CMS/TauIDRecommendationForRun2 + + +## Summary of available SFs + +This is a rough summary of the available SFs for `DeepTau2017v2p1`: + +| Tau component | `genmatch` | `DeepTau2017v2p1` `VSjet` | `DeepTau2017v2p1` `VSe` | `DeepTau2017v2p1` `VSmu` | energy scale | +|:--------------:|:-----------:|:--------------------------:|:------------------------:|:-------------------------:|:--------------:| +| real tau | `5` | vs. pT, or vs. DM | – (*) | – (*) | vs. DM | +| e -> tau fake | `1`, `3` | – | vs. eta | – | vs. DM and eta | +| mu -> tau fake | `2`, `4` | – | – | vs. eta | – (±1% unc.) | + +(*) An extra uncertainty is recommended if you use a different working point (WP) combination than was used to measure the SFs; +see the [TWiki](https://twiki.cern.ch/twiki/bin/viewauth/CMS/TauIDRecommendationForRun2). + +The gen-matching is defined as: +* `1` for prompt electrons +* `2` for prompt muons +* `3` for electrons from tau decay +* `4` for muons from tau decay +* `5` for real taus +* `6` for no match, or jets faking taus. +For more info on gen-matching of taus, please see [here](https://twiki.cern.ch/twiki/bin/viewauth/CMS/HiggsToTauTauWorking2016#MC_Matching). +Note that in nanoAOD this is available as `Tau_GenPartFlav`, but jet or no match correspond to `Tau_GenPartFlav==0` instead of `6`. + +The SFs are meant for the following campaigns: + +| Year label | MC campaign | Data campaign | +|:------------:|:------------------------:| :----------------------:| +| `2016Legacy` | `RunIISummer16MiniAODv3` | `17Jul2018` | +| `2017ReReco` | `RunIIFall17MiniAODv2` | `31Mar2018` | +| `2018ReReco` | `RunIIAutumn18MiniAOD` | `17Sep2018`/`22Jan2019` | + + +## Usage + +Please install the [`correctionlib`](https://github.com/cms-nanoAOD/correctionlib) tool to read these SFs. +There are several ways to install, but the best way is via `python3`, for example, +``` +source /cvmfs/sft.cern.ch/lcg/views/LCG_99/x86_64-centos7-gcc8-opt/setup.sh +git clone --recursive https://github.com/cms-tau-pog/correctionlib.git +cd correctionlib +python3 -m pip install . +python3 -c 'import correctionlib._core; import correctionlib.schemav2' # test +``` +Find out the content of the `tau.json` using +``` +gunzip POG/TAU/2018_ReReco/tau.json.gz +correction summary POG/TAU/2018_ReReco/tau.json +``` +An example is given in [`examples/tauExample.py`](../../examples/tauExample.py). +You can load the set of corrections in Python as follows +``` +import correctionlib as _core +cset = _core.CorrectionSet.from_file("tau.json") +corr1 = cset["DeepTau2017v2p1VSjet"] +corr2 = cset["DeepTau2017v2p1VSe"] +corr3 = cset["tau_trigger"] +corr4 = cset["tau_energy_scale"] +``` +Then, on an event-by-event basis with reconstructed tau objects, you can do +``` +sf1 = corr1.evaluate(pt,dm,genmatch,wp,syst,"pt") +sf2 = corr2.evaluate(eta,genmatch,wp,syst) +sf3 = corr3.evaluate(pt,dm,"etau",wp,"sf",syst) +tes = corr4.evaluate(pt,eta,dm,genmatch,"DeepTau2017v2p1",syst) +``` +Here `syst` is `'nom'`, `'up'` or `'down'`. +A C++ example can be found [here](https://github.com/cms-nanoAOD/correctionlib/blob/master/src/demo.cc). 
+
+Alternative way to load the JSON files (gzipped or not):
+```
+import correctionlib as _core
+fname = "tau.json.gz"
+if fname.endswith(".json.gz"):
+    import gzip
+    with gzip.open(fname,'rt') as file:
+        data = file.read().strip()
+    cset = _core.CorrectionSet.from_string(data)
+else:
+    cset = _core.CorrectionSet.from_file(fname)
+```
+
+
+## References
+
+The TauPOG JSON files are created from https://github.com/cms-tau-pog/correctionlib
diff --git a/jsonpog-integration/README.md b/jsonpog-integration/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..be686feffcd55f6204e425231cf2d08eac05790c
--- /dev/null
+++ b/jsonpog-integration/README.md
@@ -0,0 +1,64 @@
+# jsonPOG-integration
+
+ [](https://gitlab.cern.ch/cms-nanoAOD/jsonpog-integration/-/commits/master)
+
+## Instructions for users
+
+The `correctionlib` library needed to read the files from python or C++ lives on [github](https://github.com/cms-nanoAOD/correctionlib), see
+its [documentation](https://cms-nanoaod.github.io/correctionlib/) for installation and usage instructions.
+
+Some examples on how to read the files are provided here in the [examples](./examples/) folder.
+Also see [these examples](https://gist.github.com/pieterdavid/a560e65658386d70a1720cb5afe4d3e9) for how to use the library from a ROOT::RDataFrame application.
+
+The latest files from the `master` branch of the [main project](https://gitlab.cern.ch/cms-nanoAOD/jsonpog-integration)
+are synced once a day to CVMFS at `/cvmfs/cms.cern.ch/rsync/cms-nanoAOD/jsonpog-integration` (through [CMSSDT Jenkins](https://cmssdt.cern.ch/jenkins/job/cvmfs-cms-rsync-gitlab-repo/)).
+
+The content of all the available files is summarized on [this webpage](https://cms-nanoaod-integration.web.cern.ch/commonJSONSFs/).
+
+Inspecting the files manually can also be done using the command `correction summary file.json`.
+
+## Instructions for POGs to add corrections
+
+### [POG](./POG/) folder in the repository
+
+In this folder we store all the corrections.
+Each physics object has a separate json file, and each POG has a folder for storage.
+
+| directory | year_campaign | name.json |
+| ---------- | --------------| ----------|
+| POG/EGM | "X_Y" | photon.json |
+| | "" | electron.json |
+| POG/TAU | "" | tau.json |
+| POG/MUO | "" | muon.json |
+| POG/JME | "" | fatJetPuppi.json |
+| | "" | jetCHS.json |
+| POG/BTV | "" | btagging.json |
+| | "" | ctagging.json |
+| | "" | subjet_btagging.json |
+| POG/LUM | "" | puWeights.json |
+
+To be taken care of:
+1. different campaigns are organized in folders with label "X_Y", e.g. (2016preVFP_UL, 2016postVFP_UL, 2017_UL, 2018_UL, 2018_EOY, ...)
+2. each physics object in nanoAOD gets a separate json
+3. the "inputs" labels should be unique and standardized; please have a look at the existing files
+4. store the json in .gz format for compression
+5. when changing the content of an existing correction (same name), make sure you increase its version number
+6. for systematic variations, until they are [better supported in correctionlib](https://github.com/cms-nanoAOD/correctionlib/issues/4),
+please provide both the nominal and *up/down variations* of the corrections, not the uncertainties themselves (see the sketch below).
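+
+As a concrete illustration of point 6, the sketch below (a python dict, following the same pattern as [`misc/LUM/makePUReWeightJSON.py`](./misc/LUM/makePUReWeightJSON.py)) stores the varied corrections themselves, not their uncertainties, under a schema-v2 `category` node. The correction name, the `systematic` input label, the key names and the numbers are only examples; key naming conventions differ between POGs (e.g. `sf`/`sfup`/`sfdown` or `nominal`/`systup`/`systdown`).
+```
+example_correction = {
+    "name": "example_sf",
+    "version": 1,
+    "inputs": [
+        {"name": "systematic", "type": "string", "description": "nominal, up, or down"},
+    ],
+    "output": {"name": "weight", "type": "real", "description": "scale factor"},
+    "data": {
+        "nodetype": "category",
+        "input": "systematic",
+        "content": [
+            {"key": "nominal", "value": 1.01},  # varied values, not uncertainties
+            {"key": "up", "value": 1.03},
+            {"key": "down", "value": 0.95},
+        ],
+    },
+}
+# the full file wraps such corrections in {"schema_version": 2, "corrections": [example_correction]}
+```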
+
+### Before making a merge request
+
+* Configure the forked project from which you will be making a merge request, so that the test scripts can run properly:
+  * Create a project access token in the forked repository under *Settings > Access Tokens*: create a token with `Reporter` rights and `api` scope
+  * Copy the token to a CI variable named `GITLAB_API_TOKEN` under the repository *Settings > CI/CD > Variables*
+* Make sure you have rebased your branch on top of the latest `master` branch of the [main project](https://gitlab.cern.ch/cms-nanoAOD/jsonpog-integration)
+* Validate the files you have added or modified using `correction validate file.json`
+
+### Automatic tests
+
+Once a merge request is made, and provided the above has been done, the automatic tests will run.
+The tests run the script defined [here](./script/testMR.sh).
+Goals of the test:
+* verify that the files are compliant with the JSON schema ([currently v2](https://cms-nanoaod.github.io/correctionlib/schemav2.html)).
+* produce a summary to inspect the content of new files
+* for modified files, produce a summary of the changes, and verify that corrections whose content changes have their version number increased.
diff --git a/jsonpog-integration/examples/Makefile b/jsonpog-integration/examples/Makefile
new file mode 100644
index 0000000000000000000000000000000000000000..c8484ba5c54b9db8d5d81d9096d015c642b0c12b
--- /dev/null
+++ b/jsonpog-integration/examples/Makefile
@@ -0,0 +1,9 @@
+all: jercExample
+
+jercExample: jercExample.C
+	g++ $^ -o $@ $(shell correction config --cflags --ldflags --rpath)
+
+clean:
+	@rm -r jercExample
+
+.PHONY: clean
diff --git a/jsonpog-integration/examples/README.md b/jsonpog-integration/examples/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..d5473bb3b25f69950782eb3e6979872d20b59eb7
--- /dev/null
+++ b/jsonpog-integration/examples/README.md
@@ -0,0 +1,28 @@
+# Examples
+
+Basic examples for standard usage of JSON tables with `correctionlib` are provided.
+
+## BTV
+
+## Electrons
+
+## JERC
+
+Two versions of the example are provided: one in C++ and one in Python. Both do essentially the same thing. The Python example can be executed directly from the command line, whereas the C++ example should first be compiled with `make`.
+
+Six use cases are shown:
+- Retrieve the jet energy correction at a single level (e.g. `L2Relative`) for AK4 and AK8.
+- Retrieve the jet energy compound correction (all levels---this is probably what you need in most cases).
+- Retrieve the jet energy uncertainty (e.g. `Total`---probably also what most analyses will need).
+- Retrieve the jet energy resolution scale factor +- Retrieve the jet energy resolution +- Retrieve the jet energy correction factor from the jet energy resolution smearing + +## JMAR + +## MET $\phi$ + +## Muons + +## Tauons + diff --git a/jsonpog-integration/examples/btvExample.py b/jsonpog-integration/examples/btvExample.py new file mode 100644 index 0000000000000000000000000000000000000000..27d98f896a69ee0b67a6d86e0c8f4766d4cad301 --- /dev/null +++ b/jsonpog-integration/examples/btvExample.py @@ -0,0 +1,56 @@ +import numpy as np +import os +import correctionlib + +sfDir = os.path.join(".", "..", "POG", "BTV", "2018_UL") +btvjson = correctionlib.CorrectionSet.from_file(os.path.join(sfDir, "btagging.json.gz")) + +# generate 20 dummy jet features +jet_pt = np.random.exponential(50., 20) +jet_eta = np.random.uniform(0.0, 2.4, 20) +jet_flav = np.random.choice([0, 4, 5], 20) +jet_discr = np.random.uniform(0.0, 1.0, 20) + +# separate light and b/c jets +light_jets = np.where(jet_flav == 0) +bc_jets = np.where(jet_flav != 0) + +# case 1: fixedWP correction with mujets (here medium WP) +# evaluate('systematic', 'working_point', 'flavor', 'abseta', 'pt') +bc_jet_sf = btvjson["deepJet_mujets"].evaluate("central", "M", + jet_flav[bc_jets], jet_eta[bc_jets], jet_pt[bc_jets]) +light_jet_sf = btvjson["deepJet_incl"].evaluate("central", "M", + jet_flav[light_jets], jet_eta[light_jets], jet_pt[light_jets]) +print("\njet SFs for mujets at medium WP:") +print(f"SF b/c: {bc_jet_sf}") +print(f"SF light: {light_jet_sf}") + +# case 2: fixedWP correction uncertainty (here tight WP and comb SF) +# evaluate('systematic', 'working_point', 'flavor', 'abseta', 'pt') +bc_jet_sf = btvjson["deepJet_comb"].evaluate("up_correlated", "T", + jet_flav[bc_jets], jet_eta[bc_jets], jet_pt[bc_jets]) +light_jet_sf = btvjson["deepJet_incl"].evaluate("up_correlated", "T", + jet_flav[light_jets], jet_eta[light_jets], jet_pt[light_jets]) +print("\njet SF up_correlated for comb at tight WP:") +print(f"SF b/c: {bc_jet_sf}") +print(f"SF light: {light_jet_sf}") + +# case 3: shape correction SF +# evaluate('systematic', 'flavor', 'eta', 'pt', 'discriminator') +jet_sf = btvjson["deepJet_shape"].evaluate("central", + jet_flav, jet_eta, jet_pt, jet_discr) +print("\njet SF for shape correction:") +print(f"SF: {jet_sf}") + +# case 4: shape correction SF uncertainties +# evaluate('systematic', 'flavor', 'eta', 'pt', 'discriminator') +c_jets = np.where(jet_flav == 4) +blight_jets = np.where(jet_flav != 4) +b_jet_sf = btvjson["deepJet_shape"].evaluate("up_hfstats2", + jet_flav[blight_jets], jet_eta[blight_jets], jet_pt[blight_jets], jet_discr[blight_jets]) +c_jet_sf = btvjson["deepJet_shape"].evaluate("up_cferr1", + jet_flav[c_jets], jet_eta[c_jets], jet_pt[c_jets], jet_discr[c_jets]) +print("\njet SF up_hfstats2 for shape correction b/light jets:") +print(f"SF b/light: {b_jet_sf}") +print("jet SF up_cferr1 for shape correction c jets:") +print(f"SF c: {c_jet_sf}") diff --git a/jsonpog-integration/examples/btvExample_106X.py b/jsonpog-integration/examples/btvExample_106X.py new file mode 100644 index 0000000000000000000000000000000000000000..f3c12e8abbb6de8ccc05e2551995581e1a9f26aa --- /dev/null +++ b/jsonpog-integration/examples/btvExample_106X.py @@ -0,0 +1,51 @@ +import numpy as np +import os +from correctionlib import _core + +sfDir = os.path.join(".", "..", "POG", "BTV", "2016preVFP_UL") +sfName = os.path.join(sfDir, "btagging.json.gz") +if sfName.endswith(".gz"): + import gzip + with gzip.open(sfName, "rt") as f: + data = 
f.read().strip() + btvjson = _core.CorrectionSet.from_string(data) +else: + btvjson = _core.CorrectionSet.from_file(sfName) + +# case 1: fixedWP correction with mujets (here medium WP) +# evaluate('systematic', 'working_point', 'flavor', 'abseta', 'pt') +bc_jet_sf = btvjson["deepJet_mujets"].evaluate("central", "M", + 5, 1.2, 60.) +light_jet_sf = btvjson["deepJet_incl"].evaluate("central", "M", + 0, 2.2, 100.) +print("\njet SFs for mujets at medium WP:") +print("SF b/c: {bc_jet_sf}".format(bc_jet_sf=bc_jet_sf)) +print("SF light: {light_jet_sf}".format(light_jet_sf=light_jet_sf)) + +# case 2: fixedWP correction uncertainty (here tight WP and comb SF) +# evaluate('systematic', 'working_point', 'flavor', 'abseta', 'pt') +bc_jet_sf = btvjson["deepJet_comb"].evaluate("up_correlated", "T", + 5, 1.2, 60.) +light_jet_sf = btvjson["deepJet_incl"].evaluate("up_correlated", "T", + 0, 2.2, 100.) +print("\njet SF up_correlated for comb at tight WP:") +print("SF b/c: {bc_jet_sf}".format(bc_jet_sf=bc_jet_sf)) +print("SF light: {light_jet_sf}".format(light_jet_sf=light_jet_sf)) + +# case 3: shape correction SF +# evaluate('systematic', 'flavor', 'eta', 'pt', 'discriminator') +jet_sf = btvjson["deepJet_shape"].evaluate("central", + 5, 1.2, 60., 0.95) +print("\njet SF for shape correction:") +print("SF: {jet_sf}".format(jet_sf=jet_sf)) + +# case 4: shape correction SF uncertainties +# evaluate('systematic', 'flavor', 'eta', 'pt', 'discriminator') +b_jet_sf = btvjson["deepJet_shape"].evaluate("up_hfstats2", + 5, 1.2, 60., 0.95) +c_jet_sf = btvjson["deepJet_shape"].evaluate("up_cferr1", + 4, 2.2, 100., 0.45) +print("\njet SF up_hfstats2 for shape correction b/light jets:") +print("SF b/light: {b_jet_sf}".format(b_jet_sf=b_jet_sf)) +print("jet SF up_cferr1 for shape correction c jets:") +print("SF c: {c_jet_sf}".format(c_jet_sf=c_jet_sf)) diff --git a/jsonpog-integration/examples/electronExample.py b/jsonpog-integration/examples/electronExample.py new file mode 100644 index 0000000000000000000000000000000000000000..cb9c195a7bc7986a848796d46e36c6371efef81e --- /dev/null +++ b/jsonpog-integration/examples/electronExample.py @@ -0,0 +1,38 @@ +## example how to read the electron format v2 +from correctionlib import _core + +evaluator = _core.CorrectionSet.from_file('./../POG/EGM/2016postVFP_UL/electron.json.gz') + +valsf= evaluator["UL-Electron-ID-SF"].evaluate("2016postVFP","sf","RecoBelow20",1.1, 15.0) +print("sf is:"+str(valsf)) + +valsf= evaluator["UL-Electron-ID-SF"].evaluate("2016postVFP","sf","RecoAbove20",1.1, 25.0) +print("sf is:"+str(valsf)) + +valsf= evaluator["UL-Electron-ID-SF"].evaluate("2016postVFP","sf","Medium",1.1, 34.0) +print("sf is:"+str(valsf)) + +valsystup= evaluator["UL-Electron-ID-SF"].evaluate("2016postVFP","sfup","Medium",1.1, 34.0) +print("systup is:"+str(valsystup)) + +valsystdown= evaluator["UL-Electron-ID-SF"].evaluate("2016postVFP","sfdown","Medium",1.1, 34.0) +print("systdown is:"+str(valsystdown)) + +## example how to read the electron format in 2023 Prompt (eta, pT, phi) + +evaluator = _core.CorrectionSet.from_file('./../POG/EGM/2023_Summer23BPix/electron.json.gz') + +valsf= evaluator["Electron-ID-SF"].evaluate("2023PromptD","sf","RecoBelow20",1.1, 15.0, 2.0) +print("sf is:"+str(valsf)) + +valsf= evaluator["Electron-ID-SF"].evaluate("2023PromptD","sf","Reco20to75",1.1, 25.0, 2.0) +print("sf is:"+str(valsf)) + +valsf= evaluator["Electron-ID-SF"].evaluate("2023PromptD","sf","Medium",1.1, 34.0, -1.0) +print("sf is:"+str(valsf)) + +valsystup= 
evaluator["Electron-ID-SF"].evaluate("2023PromptD","sfup","Medium",1.1, 34.0, -1.0) +print("systup is:"+str(valsystup)) + +valsystdown= evaluator["Electron-ID-SF"].evaluate("2023PromptD","sfdown","Medium",1.1, 34.0, -1.0) +print("systdown is:"+str(valsystdown)) \ No newline at end of file diff --git a/jsonpog-integration/examples/electronHltExample.py b/jsonpog-integration/examples/electronHltExample.py new file mode 100644 index 0000000000000000000000000000000000000000..3ffe7755e39c15970a3bc4bb816cbb295f64582b --- /dev/null +++ b/jsonpog-integration/examples/electronHltExample.py @@ -0,0 +1,19 @@ +## example how to read the electronHlt format v2 +from correctionlib import _core + +evaluator = _core.CorrectionSet.from_file('./../POG/EGM/2022_Summer22/electronHlt.json.gz') + +valsf= evaluator["Electron-HLT-SF"].evaluate("2022Re-recoBCD","sf","HLT_SF_Ele30_TightID",1.1, 45.0) +print("sf is:"+str(valsf)) + +valsystup= evaluator["Electron-HLT-SF"].evaluate("2022Re-recoBCD","sfup","HLT_SF_Ele30_TightID",1.1, 45.0) +print("systup is:"+str(valsystup)) + +valsystdown= evaluator["Electron-HLT-SF"].evaluate("2022Re-recoBCD","sfdown","HLT_SF_Ele30_TightID",1.1, 45.0) +print("systdown is:"+str(valsystdown)) + +valeffdata= evaluator["Electron-HLT-DataEff"].evaluate("2022Re-recoBCD","nom","HLT_SF_Ele30_MVAiso90ID",1.1, 45.0) +print("sf is:"+str(valeffdata)) + +valeffMCup= evaluator["Electron-HLT-McEff"].evaluate("2022Re-recoE+PromptFG","up","HLT_SF_Ele30_MVAiso90ID",1.1, 45.0) +print("sf is:"+str(valeffMCup)) \ No newline at end of file diff --git a/jsonpog-integration/examples/jercExample.C b/jsonpog-integration/examples/jercExample.C new file mode 100644 index 0000000000000000000000000000000000000000..f37b85b3690de1da3b7cf47dc654a576d70f1a53 --- /dev/null +++ b/jsonpog-integration/examples/jercExample.C @@ -0,0 +1,167 @@ +#include <cassert> +#include <filesystem> +#include <iostream> +#include <string> +#include "correction.h" + +using namespace std; +namespace fs = filesystem; + +double singleLevel (const map<string, correction::Variable::Type>& example, + const unique_ptr<correction::CorrectionSet>& cset, + string jec, string lvl, string algo) +{ + string key = jec + '_' + lvl + '_' + algo; + cout << "JSON access to key: " << key << endl; + + correction::Correction::Ref sf = cset->at(key); + + cout << "Inputs:"; + vector<correction::Variable::Type> inputs; + for (const correction::Variable& input: sf->inputs()) { + cout << ' ' << input.name(); + inputs.push_back(example.at(input.name())); + } + cout << endl; + + double result = sf->evaluate(inputs); + cout << "JSON result: " << result << endl; + + return result; +} + +double compoundLevel (const map<string, correction::Variable::Type>& example, + const unique_ptr<correction::CorrectionSet>& cset, + string jec, string lvl, string algo) +{ + string key = jec + '_' + lvl + '_' + algo; + cout << "JSON access to key: " << key << endl; + + correction::CompoundCorrection::Ref sf = cset->compound().at(key); // note: the only different is here + + cout << "Inputs:"; + vector<correction::Variable::Type> inputs; + for (const correction::Variable& input: sf->inputs()) { + cout << ' ' << input.name(); + inputs.push_back(example.at(input.name())); + } + cout << endl; + + double result = sf->evaluate(inputs); + cout << "JSON result: " << result << endl; + + return result; +} + +void smear (const map<string, correction::Variable::Type>& example, + const unique_ptr<correction::CorrectionSet>& cset) +{ + correction::Correction::Ref sf = cset->at("JERSmear"); + + cout 
<< "Inputs:"; + vector<correction::Variable::Type> inputs; + for (const correction::Variable& input: sf->inputs()) { + cout << ' ' << input.name(); + inputs.push_back(example.at(input.name())); + } + cout << endl; + + double result = sf->evaluate(inputs); + cout << "JSON result: " << result << endl; + + // to implement smearing in the analysis code, multiply + // the `jersmear_factor` obtained above to the `Jet_pt` + // and `Jet_mass` variables +} + +int main () +{ + map<string, correction::Variable::Type> example { + { // jet transverse momentum + "JetPt", 100.0}, + { // jet pseudorapidity + "JetEta", 0.0}, + { // jet azimuthal angle + "JetPhi", 0.2}, + { // jet area + "JetA", 0.5}, + { // median energy density (pileup) + "Rho", 15.0}, + { // systematic variation (only for JER SF) + "systematic", "nom"}, + { // pT of matched gen-level jet (only for JER smearing) + "GenPt", 80.0}, // or -1 if no match + { // unique event ID used for deterministic pseudorandom number generation (only for JER smearing) + "EventID", 12345}, + }; + + string jec = "Summer19UL16_V7_MC", // JEC base tag + jer = "Summer20UL16_JRV3_MC", // JER base tag + algo_ak4 = "AK4PFchs", // AK4 jet algorithm + algo_ak8 = "AK8PFPuppi", // AK8 jet algorithm + lvl_single = "L2Relative", // jet energy correction level + lvl_compound = "L1L2L3Res", // jet energy correction level + unc = "Total"; // jet energy uncertainty + + // print input information + cout << "\nJEC parameters\n##############" + << "\njec = " << jec + << "\nalgo_ak4 = " << algo_ak4 + << "\nalgo_ak8 = " << algo_ak8; + //cout << "\nJetPt = " << get<double>(example["JetPt"]) << endl; + for (const string& v: {"JetPt", "JetEta", "JetA", "JetPhi", "JetA", "Rho"}) + cout << '\n' << v << ' ' << get<double>(example[v]); + cout << '\n' << endl; + + /**** load JSON files using correctionlib ****/ + + // AK4 + fs::path fname_ak4 = "../POG/JME/2016postVFP_UL/jet_jerc.json.gz"; + cout << "Loading JSON file: " << fname_ak4 << endl; + assert(fs::exists(fname_ak4)); + unique_ptr<correction::CorrectionSet> cset_ak4 = + correction::CorrectionSet::from_file(fname_ak4.string()); + + // AK8 + fs::path fname_ak8 = "../POG/JME/2016postVFP_UL/fatJet_jerc.json.gz"; + cout << "Loading JSON file: " << fname_ak8 << endl; + assert(fs::exists(fname_ak8)); + unique_ptr<correction::CorrectionSet> cset_ak8 = + correction::CorrectionSet::from_file(fname_ak8.string()); + + // tool for JER smearing + fs::path fname_jersmear = "../POG/JME/jer_smear.json.gz"; + cout << "Loading JSON file: " << fname_jersmear << endl; + assert(fs::exists(fname_jersmear)); + auto cset_jersmear = correction::CorrectionSet::from_file(fname_jersmear.string()); + + /**** run examples ****/ + + cout << "\nExample 1: single JEC level\n===================" << endl; + singleLevel(example, cset_ak4, jec, lvl_single, algo_ak4); + singleLevel(example, cset_ak8, jec, lvl_single, algo_ak8); + + cout << "\nExample 2: compound JEC level\n===================" << endl; + compoundLevel(example, cset_ak4, jec, lvl_compound, algo_ak4); + compoundLevel(example, cset_ak8, jec, lvl_compound, algo_ak8); + + cout << "\nExample 3: JEC uncertainty source\n===================" << endl; + singleLevel(example, cset_ak4, jec, unc, algo_ak4); + singleLevel(example, cset_ak8, jec, unc, algo_ak8); + // additional note: Regrouped/reduced set of uncertainty sorces as detailed in + // https://twiki.cern.ch/twiki/bin/viewauth/CMS/JECUncertaintySources#Run_2_reduced_set_of_uncertainty # noqa + // are included in relevant JSON files (currently UL) with a 
"Regrouped_"-prefix, + // e.g. for 2016 one could access "Absolute_2016" via: + // sf = cset["Summer19UL16_V7_MC_Regrouped_Absolute_2016_AK4PFchs"] + + cout << "\nExample 4: JER scale factor\n===================" << endl; + example["JERSF"] = singleLevel(example, cset_ak4, jer, "ScaleFactor", algo_ak4); + + cout << "\nExample 5: JER (pT resolution)\n===================" << endl; + example["JER"] = singleLevel(example, cset_ak4, jer, "PtResolution", algo_ak4); + + cout << "\nExample 6: JER smearing\n===================" << endl; + smear(example, cset_jersmear); + + return 0; +} diff --git a/jsonpog-integration/examples/jercExample.py b/jsonpog-integration/examples/jercExample.py new file mode 100644 index 0000000000000000000000000000000000000000..74013d0459e355f3afa2941f1c8de932f165841a --- /dev/null +++ b/jsonpog-integration/examples/jercExample.py @@ -0,0 +1,252 @@ +#! /usr/bin/env python +# Example of how to read the JME-JERC JSON files +# For more information, see the README in +# https://gitlab.cern.ch/cms-nanoAOD/jsonpog-integration/-/tree/master/POG/JME +# For a comparison to the CMSSW-syntax refer to +# https://github.com/cms-jet/JECDatabase/blob/master/scripts/JERC2JSON/minimalDemo.py + +import os +import correctionlib._core as core + +# path to directory of this script +__this_dir__ = os.path.dirname(__file__) + +# +# helper functions +# + +def get_corr_inputs(input_dict, corr_obj): + """ + Helper function for getting values of input variables + given a dictionary and a correction object. + """ + input_values = [input_dict[inp.name] for inp in corr_obj.inputs] + return input_values + + +# +# values of input variables +# + +example_value_dict = { + # jet transverse momentum + "JetPt": 100.0, + # jet pseudorapidity + "JetEta": 0.0, + # jet azimuthal angle + "JetPhi": 0.2, + # jet area + "JetA": 0.5, + # median energy density (pileup) + "Rho": 15.0, + # systematic variation (only for JER SF) + "systematic": "nom", + # pT of matched gen-level jet (only for JER smearing) + "GenPt": 80.0, # or -1 if no match + # unique event ID used for deterministic + # pseudorandom number generation (only for JER smearing) + "EventID": 12345, +} + + +# +# JEC-related examples +# + +# JEC base tag +jec = "Summer19UL16_V7_MC" + +# jet algorithms +algo = "AK4PFchs" +algo_ak8 = "AK8PFPuppi" + +# jet energy correction level +lvl = "L2Relative" + +# jet energy correction level +lvl_compound = "L1L2L3Res" + +# jet energy uncertainty +unc = "Total" + +# print input information +print("\n\nJEC parameters") +print("##############\n") + +print("jec = {}".format(jec)) +print("algo = {}".format(algo)) +print("algo_ak8 = {}".format(algo_ak8)) +for v in ("JetPt", "JetEta", "JetA", "JetPhi", "JetA", "Rho"): + print("{} = {}".format(v, example_value_dict[v])) + + +# +# load JSON files using correctionlib +# + +# AK4 +fname = os.path.join(__this_dir__, "../POG/JME/2016postVFP_UL/jet_jerc.json.gz") +print("\nLoading JSON file: {}".format(fname)) +cset = core.CorrectionSet.from_file(os.path.join(fname)) + +# AK8 +fname_ak8 = os.path.join(__this_dir__, "../POG/JME/2016postVFP_UL/fatJet_jerc.json.gz") +print("\nLoading JSON file: {}".format(fname_ak8)) +cset_ak8 = core.CorrectionSet.from_file(os.path.join(fname_ak8)) + +# tool for JER smearing +fname_jersmear = os.path.join(__this_dir__, "../POG/JME/jer_smear.json.gz") +print("\nLoading JSON file: {}".format(fname_jersmear)) +cset_jersmear = core.CorrectionSet.from_file(os.path.join(fname_jersmear)) + + +# +# example 1: getting a single JEC level +# + +print("\n\nExample 
1: single JEC level\n===================") + +key = "{}_{}_{}".format(jec, lvl, algo) +key_ak8 = "{}_{}_{}".format(jec, lvl, algo_ak8) +print("JSON access to keys: '{}' and '{}'".format(key, key_ak8)) +sf = cset[key] +sf_ak8 = cset_ak8[key_ak8] + +sf_input_names = [inp.name for inp in sf.inputs] +print("Inputs: " + ", ".join(sf_input_names)) + +inputs = get_corr_inputs(example_value_dict, sf) +print("JSON result AK4: {}".format(sf.evaluate(*inputs))) + +inputs = get_corr_inputs(example_value_dict, sf_ak8) +print("JSON result AK8: {}".format(sf_ak8.evaluate(*inputs))) + + +# +# example 2: accessing the JEC as a CompoundCorrection +# + +print("\n\nExample 2: compound JEC level\n===================") + +key = "{}_{}_{}".format(jec, lvl_compound, algo) +key_ak8 = "{}_{}_{}".format(jec, lvl_compound, algo_ak8) +print("JSON access to keys: '{}' and '{}'".format(key, key_ak8)) +sf = cset.compound[key] +sf_ak8 = cset_ak8.compound[key_ak8] + +sf_input_names = [inp.name for inp in sf.inputs] +print("Inputs: " + ", ".join(sf_input_names)) + +inputs = get_corr_inputs(example_value_dict, sf) +print("JSON result AK4: {}".format(sf.evaluate(*inputs))) + +inputs = get_corr_inputs(example_value_dict, sf_ak8) +print("JSON result AK8: {}".format(sf_ak8.evaluate(*inputs))) + + +# +# example 3: accessing the JEC uncertainty sources +# + +print("\n\nExample 3: JEC uncertainty source\n===================") + +# additional note: Regrouped/reduced set of uncertainty sorces as detailed in +# https://twiki.cern.ch/twiki/bin/viewauth/CMS/JECUncertaintySources#Run_2_reduced_set_of_uncertainty # noqa +# are included in relevant JSON files (currently UL) with a "Regrouped_"-prefix, +# e.g. for 2016 one could access "Absolute_2016" via: +# sf = cset["Summer19UL16_V7_MC_Regrouped_Absolute_2016_AK4PFchs"] + +key = "{}_{}_{}".format(jec, unc, algo) +print("JSON access to key: '{}'".format(key)) +sf = cset[key] + +sf_input_names = [inp.name for inp in sf.inputs] +print("Inputs: " + ", ".join(sf_input_names)) + +inputs = get_corr_inputs(example_value_dict, sf) +print("JSON result: {}".format(sf.evaluate(*inputs))) + + + +######################## +# JER-related examples # +######################## + +# JER base tag +jer = "Summer20UL16_JRV3_MC" + +# algorithms +algo = "AK4PFchs" + +# print input information +print("\n\nJER parameters") +print("##############\n") + +print("jer = {}".format(jer)) +print("algo = {}".format(algo)) +for v in ("JetPt", "JetEta", "Rho"): + print("{} = {}".format(v, example_value_dict[v])) + + +# +# example 4: accessing the JER scale factor +# + +print("\n\nExample 4: JER scale factor\n===================") + +key = "{}_{}_{}".format(jer, "ScaleFactor", algo) +print("JSON access to key: '{}'".format(key)) +sf = cset[key] + +sf_input_names = [inp.name for inp in sf.inputs] +print("Inputs: " + ", ".join(sf_input_names)) + +inputs = get_corr_inputs(example_value_dict, sf) +jersf_value = sf.evaluate(*inputs) +print("JSON result: {}".format(jersf_value)) + + +# +# example 5: accessing the JER +# + +print("\n\nExample 5: JER (pT resolution)\n===================") + +key = "{}_{}_{}".format(jer, "PtResolution", algo) +print("JSON access to key: '{}'".format(key)) +sf = cset[key] + +sf_input_names = [inp.name for inp in sf.inputs] +print("Inputs: " + ", ".join(sf_input_names)) + +inputs = get_corr_inputs(example_value_dict, sf) +jer_value = sf.evaluate(*inputs) +print("JSON result: {}".format(jer_value)) + + +# +# example 6: performing JER smearing +# (needs JER/JERSF from previous step) +# + + 
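+# In addition to the jet kinematics, the JERSmear evaluation below uses the JER and
+# JERSF values obtained in examples 4 and 5, the matched gen-jet pT ("GenPt", set to
+# -1 when there is no match) and a unique event ID used for deterministic pseudorandom
+# number generation, all already present in example_value_dict. Roughly speaking,
+# gen-matched jets are scaled towards their gen pT while unmatched jets are smeared
+# stochastically; see the JME README linked at the top of this script for the exact
+# prescription.
+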
+print("\n\nExample 6: JER smearing\n===================") + +key_jersmear = "JERSmear" +print("JSON access to key: '{}'".format(key_jersmear)) +sf_jersmear = cset_jersmear[key_jersmear] + +# add previously obtained JER/JERSF values to inputs +example_value_dict["JER"] = jer_value +example_value_dict["JERSF"] = jersf_value + +sf_input_names = [inp.name for inp in sf_jersmear.inputs] +print("Inputs: " + ", ".join(sf_input_names)) + +inputs = get_corr_inputs(example_value_dict, sf_jersmear) +jersmear_factor = sf_jersmear.evaluate(*inputs) +print("JSON result: {}".format(jersmear_factor)) + +# to implement smearing in the analysis code, multiply +# the `jersmear_factor` obtained above to the `Jet_pt` +# and `Jet_mass` variables diff --git a/jsonpog-integration/examples/jmarExample.py b/jsonpog-integration/examples/jmarExample.py new file mode 100644 index 0000000000000000000000000000000000000000..9eeade04bfc5e175b0e3bbbdd7d4b17860f90480 --- /dev/null +++ b/jsonpog-integration/examples/jmarExample.py @@ -0,0 +1,59 @@ +#! /usr/bin/env python +# Example of how to read the tau JSON file +# For more information, see the README in +# https://gitlab.cern.ch/cms-nanoAOD/jsonpog-integration/-/tree/master/POG/JME +from correctionlib import _core + +# Load CorrectionSet +fname = "../POG/JME/2017_EOY/2017_jmar.json.gz" +if fname.endswith(".json.gz"): + import gzip + with gzip.open(fname,'rt') as file: + data = file.read().strip() + evaluator = _core.CorrectionSet.from_string(data) +else: + evaluator = _core.CorrectionSet.from_file(fname) + + +##### DeepAK8/ParticleNet tagging +eta, pt, syst, wp = 2.0,450.,"nom","0p1" +map_name = "ParticleNet_Top_Nominal" +valsf= evaluator[map_name].evaluate(eta, pt, syst, wp) +print("Example for "+map_name) +print("The "+syst+" SF for a Jet with pt="+str(pt) + " GeV and eta="+str(eta) + " for a misidentification rate of "+wp+" is "+str(valsf)) + +##### cut-based top tagging +eta, pt, syst, wp = 2.0,450.,"nom","wp1" +map_name = "Top_tagging_PUPPI_mergedTop" +valsf= evaluator[map_name].evaluate(eta, pt, syst, wp) +print("Example for "+map_name) +print("The "+syst+" SF for a Jet with pt="+str(pt) + " GeV and eta="+str(eta) + " for the "+wp+" working point is "+str(valsf)) + +##### cut-based W tagging +eta, pt, syst, wp = 2.0,450.,"nom","2017HP43DDT" +map_name = "Wtagging_2017HP43DDT" +valsf= evaluator[map_name].evaluate(eta, pt, syst, wp) +print("Example for "+map_name) +print("The "+syst+" SF for a Jet with pt="+str(pt) + " GeV and eta="+str(eta) + " for the "+wp+" working point is "+str(valsf)) + + +##### soft drop mass correction +eta, pt, syst = 1.0,200.,"nom" +map_name = "JMS" +valsf= evaluator[map_name].evaluate(eta, pt, syst) +print("Example for "+map_name) +print("The "+syst+" SF for a Jet with pt="+str(pt) + " GeV and eta="+str(eta) + " is "+str(valsf)) + +##### PU JetID +eta, pt, syst, wp = 2.0,20.,"nom","L" +map_name = "PUJetID_eff" +valsf= evaluator[map_name].evaluate(eta, pt, syst, wp) +print("Example for "+map_name) +print("The "+syst+" SF for a Jet with pt="+str(pt) + " GeV and eta="+str(eta) + " for the "+wp+" working point is "+str(valsf)) + +##### Quark-Gluon tagging +eta, pt, syst, discriminator_value = 1.0,20.,"nom",0.5 +map_name = "Gluon_Pythia" +valsf= evaluator[map_name].evaluate(eta, pt, syst, discriminator_value) +print("Example for "+map_name) +print("The "+syst+" SF for a Jet with pt="+str(pt) + " GeV and eta="+str(eta) + " for a discriminator value of "+str(discriminator_value)+" is "+str(valsf)) diff --git 
a/jsonpog-integration/examples/metPhiCorrectionExample.py b/jsonpog-integration/examples/metPhiCorrectionExample.py new file mode 100644 index 0000000000000000000000000000000000000000..38b77c2cb69a995e7129dd470e30740c80ffd3db --- /dev/null +++ b/jsonpog-integration/examples/metPhiCorrectionExample.py @@ -0,0 +1,117 @@ +""" +In this test script, the different MET Phi Corrections are applied to uniform MET pt,phi distributions to check whether the corrections have an effect. +The number of primary vertices are also drawn from a uniform distribution. The run numbers come from a uniform distribution as well but the run ranges (for data) +are fitting the different eras to not cause crashes. +In the end two plots are created. The first one shows the effect of the corrections on the uniform MET phi distribution and the second one shows the same effect +but as a function of the primary vertices. +This is only a technical test, the resulting plots should not be taken too seriously. +""" + +import sys +import correctionlib +import numpy as np +import matplotlib.pyplot as plt + +# get random generator +rng = np.random.default_rng() + +# existing corrections +correction_labels = ["metphicorr_pfmet_mc", "metphicorr_puppimet_mc", "metphicorr_pfmet_data", "metphicorr_puppimet_data"] +# existing eras +eras = ["2018_UL", "2017_UL", "2016postVFP_UL", "2016preVFP_UL"] +# run ranges corresponding to the eras +run_ranges = [[315252, 325274], [297020, 306463], [278769, 284045], [272007, 278771]] +# name of the correction json +infile = "met.json.gz" + +# loop over available eras and run ranges +for era,run_range in zip(eras,run_ranges): + print("\n####################### Era: {} #######################\n".format(era)) + # loop over corrections + for correction_label in correction_labels: + print("\n############ Correction: {} ############\n".format(correction_label)) + + # basic sanity check to find out whether the correction is for data or mc + is_data = None + if ("mc" in correction_label) and (not ("data" in correction_label)): + is_data = False + elif ("data" in correction_label) and (not ("mc" in correction_label)): + is_data = True + else: + print("data and mc are mixed in the correction labels") + exit() + + # load correction set from file + ceval = correctionlib.CorrectionSet.from_file("../POG/JME/{}/{}".format(era,infile)) + + # print keys and values of the correction set + # print(list(ceval.keys())) + # print(list(ceval.values())) + + # print correction name and version + for corr in ceval.values(): + print(f"Correction {corr.name}") + print(f"Version {corr.version}") + + #pts = rng.uniform(low=0.,high=1000.,size=1000000) + # draw the uncorrected met pts from a decaying power law (this is just to get a distribution which crudely resembles the met pt distribution) + pts = (rng.pareto(1.5,size=1000000))*100 + # make sure to not cross the maximum allowed value for uncorrected met pt + pts = np.minimum(pts, np.full_like(pts, 6499, dtype=float)) + # draw uncorrected met phis from a uniform distribution between -pi and pi + phis = rng.uniform(low=-3.14,high=3.14,size=1000000) + # draw number of vertices from a uniform distribution + npvs = rng.integers(low=0,high=200,size=1000000) + # use correct run ranges when working with data, otherwise use uniform run numbers in an arbitrary large window + runs = None + if is_data: + runs = rng.integers(low=run_range[0],high=run_range[1],size=1000000) + else: + runs = rng.integers(low=0,high=100000,size=1000000) + + # print some values of the starting situation + 
print("uncorrected pts:",pts[1:11]) + print("uncorrected phis:",phis[1:11]) + print("number of vertices:",npvs[1:11]) + print("run numbers:",runs[1:11]) + + # retrieve the corrected pts and phis by using the evaluate method of the corrections + corrected_pts = ceval["pt_{}".format(correction_label)].evaluate(pts,phis,npvs,runs) + corrected_phis = ceval["phi_{}".format(correction_label)].evaluate(pts,phis,npvs,runs) + + # print the corresponding corrected values + print("corrected pts:",corrected_pts[1:11]) + print("corrected phis:",corrected_phis[1:11]) + + # draw the uncorrected and corrected phis in one plot + fig, axs = plt.subplots(1, 2, sharey=True, tight_layout=True) + axs[0].set(xlabel="uncorrected phi") + axs[0].hist(phis, bins=32) + axs[1].set(xlabel="corrected phi") + axs[1].hist(corrected_phis, bins=32) + + #plt.show() + plt.savefig("{}_{}_phi.pdf".format(correction_label,era)) + print("{}_{}_phi.pdf saved".format(correction_label,era)) + + # draw the uncorrected and corrected pts in one plot + fig, axs = plt.subplots(1, 2, sharey=True, tight_layout=True) + axs[0].set(xlabel="uncorrected pt") + axs[0].hist(pts, bins=20, range=[0.,1000.]) + axs[1].set(xlabel="corrected pt") + axs[1].hist(corrected_pts, bins=20, range=[0.,1000.]) + + #plt.show() + plt.savefig("{}_{}_pt.pdf".format(correction_label,era)) + print("{}_{}_pt.pdf saved".format(correction_label,era)) + + # draw the uncorrected and corrected phis as a function of the number of primary vertices in one plot + fig, axs = plt.subplots(1, 2, tight_layout=True, sharey=True) + axs[0].hist2d(npvs, phis, bins=(20,16)) + axs[0].set(xlabel="number of primary vertices", ylabel="uncorrected phi") + axs[1].hist2d(npvs, corrected_phis, bins=(20,16)) + axs[1].set(xlabel="number of primary vertices", ylabel="corrected phi") + + #plt.show() + plt.savefig("{}_{}_phi_vs_npvs.pdf".format(correction_label,era)) + print("{}_{}_phi_vs_npvs.pdf saved".format(correction_label,era)) diff --git a/jsonpog-integration/examples/muonExample.py b/jsonpog-integration/examples/muonExample.py new file mode 100644 index 0000000000000000000000000000000000000000..289be50c8ceca2e11098f3256830199edcdbda54 --- /dev/null +++ b/jsonpog-integration/examples/muonExample.py @@ -0,0 +1,60 @@ +## example how to read the muon format v2 +## (Adapted from JMAR and EGM examples) +from correctionlib import _core + +############################ +## Example A: 2016postVFP ## +############################ + +# Load CorrectionSet +fname = "../POG/MUO/2016postVFP_UL/muon_Z.json.gz" +if fname.endswith(".json.gz"): + import gzip + with gzip.open(fname,'rt') as file: + data = file.read().strip() + evaluator = _core.CorrectionSet.from_string(data) +else: + evaluator = _core.CorrectionSet.from_file(fname) + +# TrackerMuon Reconstruction UL scale factor ==> NOTE the year key has been removed, for consistency with Run 3 +valsf = evaluator["NUM_TrackerMuons_DEN_genTracks"].evaluate(1.1, 50.0, "nominal") +print("sf is: " + str(valsf)) + +# Medium ID UL scale factor, down variation ==> NOTE the year key has been removed, for consistency with Run 3 +valsf = evaluator["NUM_MediumID_DEN_TrackerMuons"].evaluate(0.8, 35.0, "systdown") +print("systdown is: " + str(valsf)) + +# Medium ID UL scale factor, up variation ==> NOTE the year key has been removed, for consistency with Run 3 +valsf = evaluator["NUM_MediumID_DEN_TrackerMuons"].evaluate(0.8, 35.0, "systup") +print("systup is: " + str(valsf)) + +# Trigger UL systematic uncertainty only ==> NOTE the year key has been removed, for consistency 
with Run 3 +valsyst = evaluator["NUM_IsoMu24_or_IsoTkMu24_DEN_CutBasedIdTight_and_PFIsoTight"].evaluate(1.8, 54.0, "syst") +print("syst is: " + str(valsyst)) + +########################## +## Example B: 2022preEE ## +########################## + +fname = "../POG/MUO/2022_Summer22/muon_Z.json.gz" +if fname.endswith(".json.gz"): + import gzip + with gzip.open(fname,'rt') as file: + data = file.read().strip() + evaluator = _core.CorrectionSet.from_string(data) +else: + evaluator = _core.CorrectionSet.from_file(fname) + +# Medium ID 2022 scale factor using eta as input +valsf_eta = evaluator["NUM_MediumID_DEN_TrackerMuons"].evaluate(-1.1, 45.0, "nominal") +print("sf for eta = -1.1: " + str(valsf_eta)) + +# Medium ID 2022 scale factor using eta as input ==> Note that this value should be the same +# as the previous one, since even though the input can be signed eta, the SFs for 2022 were +# computed for |eta|. This is valid for ALL the years and jsons +valsf_eta = evaluator["NUM_MediumID_DEN_TrackerMuons"].evaluate(1.1, 45.0, "nominal") +print("sf for eta = 1.1 " + str(valsf_eta)) + +# Trigger 2022 systematic uncertainty only +valsyst = evaluator["NUM_IsoMu24_DEN_CutBasedIdMedium_and_PFIsoMedium"].evaluate(-1.8, 54.0, "syst") +print("syst is: " + str(valsyst)) diff --git a/jsonpog-integration/examples/photonExample.py b/jsonpog-integration/examples/photonExample.py new file mode 100644 index 0000000000000000000000000000000000000000..7d6091a62eef231734c3b55f74f282664cfe8869 --- /dev/null +++ b/jsonpog-integration/examples/photonExample.py @@ -0,0 +1,40 @@ +## example how to read the photon format v2 +from correctionlib import _core + +evaluator = _core.CorrectionSet.from_file('./../POG/EGM/2016postVFP_UL/photon.json.gz') + +valsyst= evaluator["UL-Photon-ID-SF"].evaluate("2016postVFP","sfup","Medium",1.1, 34.0) +print("sfup is:"+str(valsyst)) + +valsf= evaluator["UL-Photon-CSEV-SF"].evaluate("2016postVFP","sf","Loose","EBInc") +print("sf is:"+str(valsf)) + +valsf= evaluator["UL-Photon-PixVeto-SF"].evaluate("2016postVFP","sf","Loose","EBInc") +print("sf is:"+str(valsf)) + +valsf= evaluator["UL-Photon-PixVeto-SF"].evaluate("2016postVFP","sfup","Loose","EBInc") +print("sfup is:"+str(valsf)) + +valsf= evaluator["UL-Photon-PixVeto-SF"].evaluate("2016postVFP","sfdown","Loose","EBInc") +print("sfdown is:"+str(valsf)) + + +## example how to read the photon format v3 +from correctionlib import _core + +evaluator = _core.CorrectionSet.from_file('./../POG/EGM/2023Summer23/photon.json.gz') + +valsyst= evaluator["Photon-ID-SF"].evaluate("2023PromptC","sfup","Medium",1.1, 34.0, -1.8) +print("sfup is:"+str(valsyst)) + +valsf= evaluator["Photon-CSEV-SF"].evaluate("2023PromptC","sf","Loose",1.2, 0.85) +print("sf is:"+str(valsf)) + +valsf= evaluator["Photon-PixVeto-SF"].evaluate("2023PromptC","sf","Loose",1.2, 0.98) +print("sf is:"+str(valsf)) + +valsf= evaluator["Photon-PixVeto-SF"].evaluate("2023PromptC","sfup","Loose",1.2, 0.98) +print("sfup is:"+str(valsf)) + +valsf= evaluator["Photon-PixVeto-SF"].evaluate("2023PromptC","sfdown","Loose",1.2, 0.98) +print("sfdown is:"+str(valsf)) diff --git a/jsonpog-integration/examples/tauExample.py b/jsonpog-integration/examples/tauExample.py new file mode 100644 index 0000000000000000000000000000000000000000..24386d488ff9043994a3f209f94fb6a19f1eb6dc --- /dev/null +++ b/jsonpog-integration/examples/tauExample.py @@ -0,0 +1,53 @@ +#! 
/usr/bin/env python +# Example of how to read the tau JSON file +# For more information, see the README in +# https://gitlab.cern.ch/cms-nanoAOD/jsonpog-integration/-/tree/master/POG/TAU +#import sys; sys.path.insert(0,"correctionlib") # add correctionlib to path +from correctionlib import _core + +# Load CorrectionSet +fname = "../POG/TAU/2018_ReReco/tau.json.gz" +if fname.endswith(".json.gz"): + import gzip + with gzip.open(fname,'rt') as file: + #data = json.load(file) + data = file.read().strip() + cset = _core.CorrectionSet.from_string(data) +else: + cset = _core.CorrectionSet.from_file(fname) + +# Load Correction objects that can be evaluated +corr1 = cset["DeepTau2017v2p1VSjet"] +corr2 = cset["DeepTau2017v2p1VSe"] +corr3 = cset["tau_trigger"] +corr4 = cset["tau_energy_scale"] +pt, eta, dm, genmatch = 25., 1.0, 0, 5 +wp, syst = "Tight", "nom" +print('-'*50) +print("fname=%r"%fname) +print("pt=%.2f, eta=%.1f, dm=%d"%(pt,eta,dm)) +print("wp=%r, syst=%r"%(wp,wp)) +print('-'*50) + +# DeepTau2017v2p1VSjet +sf1 = corr1.evaluate(pt,dm,1,wp,"nom","pt") +sf2 = corr1.evaluate(pt,dm,5,wp,"nom","pt") +print("DeepTau2017v2p1VSjet sf=%.2f (genmatch=1)"%sf1) +print("DeepTau2017v2p1VSjet sf=%.2f (genmatch=5)"%sf2) + +# DeepTau2017v2p1VSe +sf3 = corr2.evaluate(eta,1,wp,syst) +sf4 = corr2.evaluate(eta,5,wp,syst) +print("DeepTau2017v2p1VSe sf=%.2f (genmatch=1)"%sf3) +print("DeepTau2017v2p1VSe sf=%.2f (genmatch=5)"%sf4) + +# etau trigger +sf5 = corr3.evaluate(pt,dm,"etau",wp,"sf",syst) +print("etau trigger sf=%.2f"%sf5) + +# tau energy scale +tes1 = corr4.evaluate(pt,eta,dm,1,"DeepTau2017v2p1",syst) +tes2 = corr4.evaluate(pt,eta,dm,5,"DeepTau2017v2p1",syst) +print("tes=%.2f (genmatch=1)"%tes1) +print("tes=%.2f (genmatch=5)"%tes2) +print('-'*50) diff --git a/jsonpog-integration/misc/LUM/2016postVFP_UL/puWeights.png b/jsonpog-integration/misc/LUM/2016postVFP_UL/puWeights.png new file mode 100644 index 0000000000000000000000000000000000000000..cf3444d64be2832f4313973b4616e8fc1065842e Binary files /dev/null and b/jsonpog-integration/misc/LUM/2016postVFP_UL/puWeights.png differ diff --git a/jsonpog-integration/misc/LUM/2016preVFP_UL/puWeights.png b/jsonpog-integration/misc/LUM/2016preVFP_UL/puWeights.png new file mode 100644 index 0000000000000000000000000000000000000000..0de43c9708fb52b75e432838ecd49ef14e9cfd24 Binary files /dev/null and b/jsonpog-integration/misc/LUM/2016preVFP_UL/puWeights.png differ diff --git a/jsonpog-integration/misc/LUM/2017_UL/puWeights.png b/jsonpog-integration/misc/LUM/2017_UL/puWeights.png new file mode 100644 index 0000000000000000000000000000000000000000..3c26445417018e5f0b4fed097af71d668fb0b18a Binary files /dev/null and b/jsonpog-integration/misc/LUM/2017_UL/puWeights.png differ diff --git a/jsonpog-integration/misc/LUM/2018_UL/puWeights.png b/jsonpog-integration/misc/LUM/2018_UL/puWeights.png new file mode 100644 index 0000000000000000000000000000000000000000..a20861d7c5769bf5d62c5e48222062bed9c05078 Binary files /dev/null and b/jsonpog-integration/misc/LUM/2018_UL/puWeights.png differ diff --git a/jsonpog-integration/misc/LUM/makePUReWeightJSON.py b/jsonpog-integration/misc/LUM/makePUReWeightJSON.py new file mode 100644 index 0000000000000000000000000000000000000000..0f636dbe38da7e7bd7774b86ed2e1319dbda80f1 --- /dev/null +++ b/jsonpog-integration/misc/LUM/makePUReWeightJSON.py @@ -0,0 +1,310 @@ +#!/usr/bin/env python +""" +A script to generate a BinnedValues-JSON file for pileup reweighting of MC +""" +import json +import logging +logger = logging.getLogger(__name__) 
+import numpy as np + +mcPUProfiles = { + #========# + # 2018 # + #========# + ## mix_2018_25ns_JuneProjectionFull18_PoissonOOTPU_cfi https://github.com/cms-sw/cmssw/blob/master/SimGeneral/MixingModule/python/mix_2018_25ns_JuneProjectionFull18_PoissonOOTPU_cfi.py#L3-L32 + "Autumn18_25ns": ( + np.linspace(0., 100., 101), + [4.695341e-10, 1.206213e-06, 1.162593e-06, 6.118058e-06, 1.626767e-05, 3.508135e-05, 7.12608e-05, 0.0001400641, 0.0002663403, 0.0004867473, 0.0008469, 0.001394142, 0.002169081, 0.003198514, 0.004491138, 0.006036423, 0.007806509, 0.00976048, 0.0118498, 0.01402411, 0.01623639, 0.01844593, 0.02061956, 0.02273221, 0.02476554, 0.02670494, 0.02853662, 0.03024538, 0.03181323, 0.03321895, 0.03443884, 0.035448, 0.03622242, 0.03674106, 0.0369877, 0.03695224, 0.03663157, 0.03602986, 0.03515857, 0.03403612, 0.0326868, 0.03113936, 0.02942582, 0.02757999, 0.02563551, 0.02362497, 0.02158003, 0.01953143, 0.01750863, 0.01553934, 0.01364905, 0.01186035, 0.01019246, 0.008660705, 0.007275915, 0.006043917, 0.004965276, 0.004035611, 0.003246373, 0.002585932, 0.002040746, 0.001596402, 0.001238498, 0.0009533139, 0.0007282885, 0.000552306, 0.0004158005, 0.0003107302, 0.0002304612, 0.0001696012, 0.0001238161, 8.96531e-05, 6.438087e-05, 4.585302e-05, 3.23949e-05, 2.271048e-05, 1.580622e-05, 1.09286e-05, 7.512748e-06, 5.140304e-06, 3.505254e-06, 2.386437e-06, 1.625859e-06, 1.111865e-06, 7.663272e-07, 5.350694e-07, 3.808318e-07, 2.781785e-07, 2.098661e-07, 1.642811e-07, 1.312835e-07, 1.081326e-07, 9.141993e-08, 7.890983e-08, 6.91468e-08, 6.119019e-08, 5.443693e-08, 4.85036e-08, 4.31486e-08, 3.822112e-08] + ), + ## mix_2018_25ns_UltraLegacy_PoissonOOTPU_cfi https://github.com/cms-sw/cmssw/blob/master/SimGeneral/MixingModule/python/mix_2018_25ns_UltraLegacy_PoissonOOTPU_cfi.py + "2018UL_25ns": ( + np.linspace(0., 99., 100), + [8.89374611122e-07, 1.1777062868e-05, 3.99725585118e-05, 0.000129888015252, 0.000265224848687, 0.000313088635109, 0.000353781668514, 0.000508787237162, 0.000873670065767, 0.00147166880932, 0.00228230649018, 0.00330375581273, 0.00466047608406, 0.00624959203029, 0.00810375867901, 0.010306521821, 0.0129512453978, 0.0160303925502, 0.0192913204592, 0.0223108613632, 0.0249798930986, 0.0273973789867, 0.0294402350483, 0.031029854302, 0.0324583524255, 0.0338264469857, 0.0351267479019, 0.0360320204259, 0.0367489568401, 0.0374133183052, 0.0380352633799, 0.0386200967002, 0.039124376968, 0.0394201612616, 0.0394673457109, 0.0391705388069, 0.0384758587461, 0.0372984548399, 0.0356497876549, 0.0334655175178, 0.030823567063, 0.0278340752408, 0.0246009685048, 0.0212676009273, 0.0180250593982, 0.0149129830776, 0.0120582333486, 0.00953400069415, 0.00738546929512, 0.00563442079939, 0.00422052915668, 0.00312446316347, 0.00228717533955, 0.00164064894334, 0.00118425084792, 0.000847785826565, 0.000603466454784, 0.000419347268964, 0.000291768785963, 0.000199761337863, 0.000136624574661, 9.46855200945e-05, 6.80243180179e-05, 4.94806013765e-05, 3.53122628249e-05, 2.556765786e-05, 1.75845711623e-05, 1.23828210848e-05, 9.31669724108e-06, 6.0713272037e-06, 3.95387384933e-06, 2.02760874107e-06, 1.22535149516e-06, 9.79612472109e-07, 7.61730246474e-07, 4.2748847738e-07, 2.41170461205e-07, 1.38701083552e-07, 3.37678010922e-08, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] + ), + #========# + # 2017 # + #========# + ## mix_2017_25ns_WinterMC_PUScenarioV1_PoissonOOTPU_cfi 
https://github.com/cms-sw/cmssw/blob/master/SimGeneral/MixingModule/python/mix_2017_25ns_WinterMC_PUScenarioV1_PoissonOOTPU_cfi.py#L3-L112 + "Fall17_25ns": ( + np.linspace(0., 99., 100), + [3.39597497605e-05, 6.63688402133e-06, 1.39533611284e-05, 3.64963078209e-05, 6.00872171664e-05, 9.33932578027e-05, 0.000120591524486, 0.000128694546198, 0.000361697233219, 0.000361796847553, 0.000702474896113, 0.00133766053707, 0.00237817050805, 0.00389825605651, 0.00594546732588, 0.00856825906255, 0.0116627396044, 0.0148793350787, 0.0179897368379, 0.0208723871946, 0.0232564170641, 0.0249826433945, 0.0262245860346, 0.0272704617569, 0.0283301107549, 0.0294006137386, 0.0303026836965, 0.0309692426278, 0.0308818046328, 0.0310566806228, 0.0309692426278, 0.0310566806228, 0.0310566806228, 0.0310566806228, 0.0307696426944, 0.0300103336052, 0.0288355370103, 0.0273233309106, 0.0264343533951, 0.0255453758796, 0.0235877272306, 0.0215627588047, 0.0195825559393, 0.0177296309658, 0.0160560731931, 0.0146022004183, 0.0134080690078, 0.0129586991411, 0.0125093292745, 0.0124360740539, 0.0123547104433, 0.0123953922486, 0.0124360740539, 0.0124360740539, 0.0123547104433, 0.0124360740539, 0.0123387597772, 0.0122414455005, 0.011705203844, 0.0108187105305, 0.00963985508986, 0.00827210065136, 0.00683770076341, 0.00545237697118, 0.00420456901556, 0.00367513566191, 0.00314570230825, 0.0022917978982, 0.00163221454973, 0.00114065309494, 0.000784838366118, 0.000533204105387, 0.000358474034915, 0.000238881117601, 0.0001984254989, 0.000157969880198, 0.00010375646169, 6.77366175538e-05, 4.39850477645e-05, 2.84298066026e-05, 1.83041729561e-05, 1.17473542058e-05, 7.51982735129e-06, 6.16160108867e-06, 4.80337482605e-06, 3.06235473369e-06, 1.94863396999e-06, 1.23726800704e-06, 7.83538083774e-07, 4.94602064224e-07, 3.10989480331e-07, 1.94628487765e-07, 1.57888581037e-07, 1.2114867431e-07, 7.49518929908e-08, 4.6060444984e-08, 2.81008884326e-08, 1.70121486128e-08, 1.02159894812e-08] + ), + ## mix_2017_25ns_UltraLegacy_PoissonOOTPU_cfi https://github.com/cms-sw/cmssw/blob/master/SimGeneral/MixingModule/python/mix_2017_25ns_UltraLegacy_PoissonOOTPU_cfi.py + "2017UL_25ns": ( + np.linspace(0., 99., 100), + [1.1840841518e-05, 3.46661037703e-05, 8.98772521472e-05, 7.47400487733e-05, 0.000123005176624, 0.000156501700614, 0.000154660478659, 0.000177496185603, 0.000324149805611, 0.000737524009713, 0.00140432980253, 0.00244424508696, 0.00380027898037, 0.00541093042612, 0.00768803501793, 0.010828224552, 0.0146608623707, 0.01887739113, 0.0228418813823, 0.0264817796874, 0.0294637401336, 0.0317960986171, 0.0336645950831, 0.0352638818387, 0.036869429333, 0.0382797316998, 0.039386705577, 0.0398389681346, 0.039646211131, 0.0388392805703, 0.0374195678161, 0.0355377892706, 0.0333383902828, 0.0308286549265, 0.0282914440969, 0.0257860718304, 0.02341635055, 0.0213126338243, 0.0195035612803, 0.0181079838989, 0.0171991315458, 0.0166377598339, 0.0166445341361, 0.0171943735369, 0.0181980997278, 0.0191339792146, 0.0198518804356, 0.0199714909193, 0.0194616474094, 0.0178626975229, 0.0153296785464, 0.0126789254325, 0.0100766041988, 0.00773867100481, 0.00592386091874, 0.00434706240169, 0.00310217013427, 0.00213213401899, 0.0013996000761, 0.000879148859271, 0.000540866009427, 0.000326115560156, 0.000193965828516, 0.000114607606623, 6.74262828734e-05, 3.97805301078e-05, 2.19948704638e-05, 9.72007976207e-06, 4.26179259146e-06, 2.80015581327e-06, 1.14675436465e-06, 2.52452411995e-07, 9.08394910044e-08, 1.14291987912e-08, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] + ), + #========# + # 2016 # + #========# + ## mix_2016_25ns_Moriond17MC_PoissonOOTPU_cfi https://github.com/cms-sw/cmssw/blob/master/SimGeneral/MixingModule/python/mix_2016_25ns_Moriond17MC_PoissonOOTPU_cfi.py#L24-L25 + "Moriond17_25ns": ( + np.linspace(0., 75., 76), + [1.78653e-05 ,2.56602e-05 ,5.27857e-05 ,8.88954e-05 ,0.000109362 ,0.000140973 ,0.000240998 ,0.00071209 ,0.00130121 ,0.00245255 ,0.00502589 ,0.00919534 ,0.0146697 ,0.0204126 ,0.0267586 ,0.0337697 ,0.0401478 ,0.0450159 ,0.0490577 ,0.0524855 ,0.0548159 ,0.0559937 ,0.0554468 ,0.0537687 ,0.0512055 ,0.0476713 ,0.0435312 ,0.0393107 ,0.0349812 ,0.0307413 ,0.0272425 ,0.0237115 ,0.0208329 ,0.0182459 ,0.0160712 ,0.0142498 ,0.012804 ,0.011571 ,0.010547 ,0.00959489 ,0.00891718 ,0.00829292 ,0.0076195 ,0.0069806 ,0.0062025 ,0.00546581 ,0.00484127 ,0.00407168 ,0.00337681 ,0.00269893 ,0.00212473 ,0.00160208 ,0.00117884 ,0.000859662 ,0.000569085 ,0.000365431 ,0.000243565 ,0.00015688 ,9.88128e-05 ,6.53783e-05 ,3.73924e-05 ,2.61382e-05 ,2.0307e-05 ,1.73032e-05 ,1.435e-05 ,1.36486e-05 ,1.35555e-05 ,1.37491e-05 ,1.34255e-05 ,1.33987e-05 ,1.34061e-05 ,1.34211e-05 ,1.34177e-05 ,1.32959e-05 ,1.33287e-05] + ), + ## mix_2016_25ns_SpringMC_PUScenarioV1_PoissonOOTPU_cfi https://github.com/cms-sw/cmssw/blob/master/SimGeneral/MixingModule/python/mix_2016_25ns_SpringMC_PUScenarioV1_PoissonOOTPU_cfi.py#L24-L75 + "Spring16_25ns": ( + np.linspace(0., 50., 51), + [ 0.000829312873542, 0.00124276120498, 0.00339329181587, 0.00408224735376, 0.00383036590008, 0.00659159288946, 0.00816022734493, 0.00943640833116, 0.0137777376066, 0.017059392038, 0.0213193035468, 0.0247343174676, 0.0280848773878, 0.0323308476564, 0.0370394341409, 0.0456917721191, 0.0558762890594, 0.0576956187107, 0.0625325287017, 0.0591603758776, 0.0656650815128, 0.0678329011676, 0.0625142146389, 0.0548068448797, 0.0503893295063, 0.040209818868, 0.0374446988111, 0.0299661572042, 0.0272024759921, 0.0219328403791, 0.0179586571619, 0.0142926728247, 0.00839941654725, 0.00522366397213, 0.00224457976761, 0.000779274977993, 0.000197066585944, 7.16031761328e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] + ), + ## mix_2016_25ns_UltraLegacy_PoissonOOTPU_cfi https://github.com/cms-sw/cmssw/blob/master/SimGeneral/MixingModule/python/mix_2016_25ns_UltraLegacy_PoissonOOTPU_cfi.py + "2016UL_25ns": ( + np.linspace(0., 99., 100), + [1.00402360149e-05, 5.76498797172e-05, 7.37891400294e-05, 0.000110932895295, 0.000158857714773, 0.000368637432599, 0.000893114107873, 0.00189700774575, 0.00358880167437, 0.00636052573486, 0.0104173961179, 0.0158122597405, 0.0223785660712, 0.0299186888073, 0.0380275944896, 0.0454313901624, 0.0511181088317, 0.0547434577348, 0.0567906239028, 0.0577145461461, 0.0578176902735, 0.0571251566494, 0.0555456541498, 0.053134383488, 0.0501519041462, 0.0466815838899, 0.0429244592524, 0.0389566776898, 0.0348507152776, 0.0307356862528, 0.0267712092206, 0.0229720184534, 0.0193388653099, 0.0159602510813, 0.0129310510552, 0.0102888654183, 0.00798782770975, 0.00606651703058, 0.00447820948367, 0.00321589786478, 0.0022450422045, 0.00151447388514, 0.000981183695515, 0.000609670479759, 0.000362193408119, 0.000211572646801, 0.000119152364744, 6.49133515399e-05, 3.57795801581e-05, 1.99043569043e-05, 1.13639319832e-05, 6.49624103579e-06, 3.96626216416e-06, 2.37910222874e-06, 1.50997403362e-06, 1.09816650247e-06, 7.31298519122e-07, 6.10398791529e-07, 3.74845774388e-07, 2.65177281359e-07, 2.01923536742e-07, 1.39347583555e-07, 
8.32600052913e-08, 6.04932421298e-08, 6.52536630583e-08, 5.90574603808e-08, 2.29162474068e-08, 1.97294602668e-08, 1.7731096903e-08, 3.57547932012e-09, 1.35039815662e-09, 8.50071242076e-09, 5.0279187473e-09, 4.93736669066e-10, 8.13919708923e-10, 5.62778926097e-09, 5.15140589469e-10, 8.21676746568e-10, 0.0, 1.49166873577e-09, 8.43517992503e-09, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] + ), + #========# + # 2015 # + #========# + ## mix_2015_25ns_FallMC_matchData_PoissonOOTPU_cfi https://github.com/cms-sw/cmssw/blob/master/SimGeneral/MixingModule/python/mix_2015_25ns_FallMC_matchData_PoissonOOTPU_cfi.py#L24-L75 + "Fall15_25ns": ( + np.linspace(0., 50., 51), + [0.000108643, 0.000388957, 0.000332882, 0.00038397, 0.000549167, 0.00105412, 0.00459007, 0.0210314, 0.0573688, 0.103986, 0.142369, 0.157729, 0.147685, 0.121027, 0.08855, 0.0582866, 0.0348526, 0.019457, 0.0107907, 0.00654313, 0.00463195, 0.00370927, 0.0031137, 0.00261141, 0.00215499, 0.00174491, 0.00138268, 0.00106731, 0.000798828, 0.00057785, 0.00040336, 0.00027161, 0.000176535, 0.00011092, 6.75502e-05, 4.00323e-05, 2.32123e-05, 1.32585e-05, 7.51611e-06, 4.25902e-06, 2.42513e-06, 1.39077e-06, 8.02452e-07, 4.64159e-07, 2.67845e-07, 1.5344e-07, 8.68966e-08, 4.84931e-08, 2.6606e-08, 1.433e-08] + ), + ## mix_2015_25ns_HiLum_PoissonOOTPU_cfi https://github.com/cms-sw/cmssw/blob/master/SimGeneral/MixingModule/python/mix_2015_25ns_HiLum_PoissonOOTPU_cfi.py#L24-L77 + "Spring15_25ns": ( + np.linspace(0., 52., 53), + [4.8551E-07, 1.74806E-06, 3.30868E-06, 1.62972E-05, 4.95667E-05, 0.000606966, 0.003307249, 0.010340741, 0.022852296, 0.041948781, 0.058609363, 0.067475755, 0.072817826, 0.075931405, 0.076782504, 0.076202319, 0.074502547, 0.072355135, 0.069642102, 0.064920999, 0.05725576, 0.047289348, 0.036528446, 0.026376131, 0.017806872, 0.011249422, 0.006643385, 0.003662904, 0.001899681, 0.00095614, 0.00050028, 0.000297353, 0.000208717, 0.000165856, 0.000139974, 0.000120481, 0.000103826, 8.88868E-05, 7.53323E-05, 6.30863E-05, 5.21356E-05, 4.24754E-05, 3.40876E-05, 2.69282E-05, 2.09267E-05, 1.5989E-05, 4.8551E-06, 2.42755E-06, 4.8551E-07, 2.42755E-07, 1.21378E-07, 4.8551E-08] + ) + } + +def getHist(fName, hName="pileup"): + from cppyy import gbl + tf = gbl.TFile.Open(fName) + if not tf: + raise RuntimeError("Could not open file '{0}'".format(fName)) + hist = tf.Get(hName) + if not hist: + raise RuntimeError("No histogram with name '{0}' found in file '{1}'".format(hName, fName)) + return tf, hist + +def normAndExtract(hist, norm=1.): + nB = hist.GetNbinsX() + xAx = hist.GetXaxis() + if norm: + hist.Scale(norm/(hist.Integral()*(xAx.GetXmax()-xAx.GetXmin())/nB)) + bEdges = np.array([ xAx.GetBinLowEdge(i) for i in range(1,nB+1) ]+[ xAx.GetBinUpEdge(nB) ]) + contents = np.array([ hist.GetBinContent(i) for i in range(1,nB+1) ]) + return bEdges, contents + +def getRatio(numBins, numCont, denBins, denCont): + ## use numerator for output format + if not all(db in numBins for db in denBins): + raise RuntimeError("Numerator (data) needs to have at least the bin edges that are the denominator (MC)") + ## ratios for the common range + xMinC, xMaxC = denBins[0], denBins[-1] + inMn = np.where(numBins == xMinC)[0][0] + inMx = np.where(numBins == xMaxC)[0][0] + ratio = np.zeros((inMx-inMn,)) + di = 0 + for ni in range(inMn, inMx): + if numBins[ni+1] > denBins[di+1]: + di += 1 + assert ( denBins[di] <= numBins[ni] ) and ( numBins[ni+1] <= denBins[di+1] ) + if denCont[di] == 0.: + ratio[ni-inMn] = 1. 
## not in denominator -> will not be used, so any value works + else: + ratio[ni-inMn] = numCont[ni]/denCont[di] + bR = np.array(numBins[inMn:inMx+1]) + ## extend range of outside ratio bins until end of numerator ranges + bR[0] = numBins[0] + bR[-1] = numBins[-1] + return bR, ratio + +def main(): + import argparse + parser = argparse.ArgumentParser(description="Produce a BinnedValues-JSON file for pileup reweighting, using data pileup distributions obtained with `pileupCalc.py -i analysis-lumi-json.txt --inputLumiJSON pileup-json.txt --calcMode true --minBiasXsec MBXSECINNB --maxPileupBin NMAX --numPileupBins N outname.root` (see also https://twiki.cern.ch/twiki/bin/viewauth/CMS/PileupJSONFileforData#Pileup_JSON_Files_For_Run_II)") + parser.add_argument("-o", "--output", default="puweights.json", type=str, help="Output file name") + parser.add_argument("-f", "--format", type=str, choices=["correctionlib", "cp3-llbb"], default="cp3-llbb", help="Output JSON format") + parser.add_argument("--name", type=str, default="puweights", help="Name of the correction inside the CorrectionSet (only used for the correctionlib format)") + parser.add_argument("--mcprofile", help="Pileup profile used to generate the MC sample (use --listmcprofiles to see the list of defined profiles)") + parser.add_argument("--listmcprofiles", action="store_true", help="list the available MC pileup profiles") + parser.add_argument("--nominal", type=str, help="File with the data (true) pileup distribution histogram assuming the nominal minimum bias cross-section value") + parser.add_argument("--up", type=str, help="File with the data (true) pileup distribution histogram assuming the nominal+1sigma minimum bias cross-section value") + parser.add_argument("--down", type=str, help="File with the data (true) pileup distribution histogram assuming the nominal-1sigma minimum bias cross-section value") + parser.add_argument("--rebin", type=int, help="Factor to rebin the data histograms by") + parser.add_argument("--makePlot", action="store_true", help="Make a plot of the PU profiles and weights (requires matplotlib)") + parser.add_argument("mcfiles", nargs="*", help="MC ROOT files to extract a pileup profile from (if used)") + parser.add_argument("--mctreename", type=str, default="Events", help="Name of the tree to use in mcfiles") + parser.add_argument("--mcreweightvar", type=str, default="Pileup_nTrueInt", help="Name of the branch in the tree of the mcfiles to use for getting a histogram") + parser.add_argument("-v", "--verbose", action="store_true", help="Print verbose output") + parser.add_argument("--gzip", action="store_true", help="Save the output as gzip file") + args = parser.parse_args() + logging.basicConfig(level=(logging.DEBUG if args.verbose else logging.INFO)) + if args.makePlot: + try: + import matplotlib + matplotlib.use("agg") + from matplotlib import pyplot as plt + except Exception as ex: + logger.warning("matplotlib could not be imported, so no plot will be produced") + args.makePlot = False + if args.gzip: + try: + import gzip, io + except Exception as ex: + logger.warning("gzip or io could not be imported, output will be stored as regular file") + args.gzip = False + if args.listmcprofiles: + logger.info("The known PU profiles are: {0}".format(", ".join(repr(k) for k in mcPUProfiles))) + return + elif args.mcfiles: + if args.mcprofile: + logger.warning("MC PU profile and MC files are passed - extracting from the files") + logger.info("Extracting the MC profile from {0} in the {1} tree of: 
{2}".format(args.mcreweightvar, args.mctreename, ", ".join(args.mcfiles))) + from cppyy import gbl + tup = gbl.TChain(args.mctreename) + for mcfn in args.mcfiles: + tup.Add(mcfn) + hMCPU = gbl.TH1F("hMCPU", "MC PU profile", 100, 0., 100.) + tup.Draw(f"{args.mcreweightvar}>>hMCPU", "", "GOFF") + mcPUBins, mcPUVals = normAndExtract(hMCPU) + elif args.mcprofile: + if args.mcprofile not in mcPUProfiles: + raise ValueError("No MC PU profile with tag '{0}' is known".format(args.mcprofile)) + + mcPUBins, mcPUVals = mcPUProfiles[args.mcprofile] + if len(mcPUBins) != len(mcPUVals)+1: + logger.verbose(len(mcPUBins), len(mcPUVals)) + else: + raise RuntimeError("Either one of --listmcprofiles or --mcprofile, or a list of MC files to extract a MC profile from, must be passed") + + if not args.nominal: + raise RuntimeError("No --nominal argument") + + + fNom, hNom = getHist(args.nominal) + if args.rebin: + hNom.Rebin(args.rebin) + nomBins, nomCont = normAndExtract(hNom) + ratioBins, nomRatio = getRatio(nomBins, nomCont, mcPUBins, mcPUVals) + + upCont, upRatio, downCont, downRatio = None, None, None, None + if bool(args.up) != bool(args.down): + raise ValueError("If either one of --up and --down is specified, both should be") + if args.up and args.down: + fUp, hUp = getHist(args.up) + if args.rebin: + hUp.Rebin(args.rebin) + upBins, upCont = normAndExtract(hUp) + #if not all(ub == nb for ub,nb in zip(upBins, nomBins)): + # raise RuntimeError("Up-variation and nominal binning is different: {0} vs {1}".format(upBins, nomBins)) + _, upRatio = getRatio(upBins, upCont, mcPUBins, mcPUVals) + fDown, hDown = getHist(args.down) + if args.rebin: + hDown.Rebin(args.rebin) + downBins, downCont = normAndExtract(hDown) + #if not all(db == nb for db,nb in zip(downBins, nomBins)): + # raise RuntimeError("Up-variation and nominal binning is different: {0} vs {1}".format(upBins, nomBins)) + _, downRatio = getRatio(downBins, downCont, mcPUBins, mcPUVals) + + if args.format == "correctionlib": + out = { + "schema_version": 2, + "corrections": [{ + "name": args.name, + "version" : 0, + "inputs": [ + { + "name": "NumTrueInteractions", + "type": "real", + "description": "Number of true interactions" + }, + { + "name": "weights", + "type": "string", + "description": "nominal, up, or down" + } + ], + "output": { + "name": "weight", + "type": "real", + "description": "Event weight for pileup reweighting" + }, + "data": { + "nodetype": "category", + "input": "weights", + "content": ([{ + "key": "nominal", + "value": { + "nodetype": "binning", + "input": "NumTrueInteractions", + "flow": "clamp", + "edges": list(ratioBins), + "content": list(nomRatio) + }}]+([{ + "key": "up", + "value": { + "nodetype": "binning", + "input": "NumTrueInteractions", + "flow": "clamp", + "edges": list(ratioBins), + "content": list(upRatio) + }}] if upRatio is not None else [] + )+([{ + "key": "down", + "value": { + "nodetype": "binning", + "input": "NumTrueInteractions", + "flow": "clamp", + "edges": list(ratioBins), + "content": list(downRatio) + }}] if downRatio is not None else []) + ) + } + }] + } + elif args.format == "cp3-llbb": + out = { + "dimension" : 1, + "variables" : ["NumTrueInteractions"], + "binning" : {"x": list(ratioBins)}, + "error_type" : "absolute", + "data" : [ + { + "bin" : [ratioBins[i], ratioBins[i+1]], + "value" : nomRatio[i], + "error_low" : (nomRatio[i]-downRatio[i] if downRatio is not None else 0.), + "error_high" : (upRatio[i]-nomRatio[i] if upRatio is not None else 0.) 
+ } for i in range(nomRatio.shape[0]) + ], + } + else: + raise ValueError(f"Unsupported output format: {args.format}") + if args.gzip: + outN = args.output + if not outN.endswith(".gz"): + outN = outN+".gz" + with gzip.open(outN, "wb") as outF, io.TextIOWrapper(outF, encoding="utf-8") as outE: + json.dump(out, outE) + else: + with open(args.output, "w") as outF: + json.dump(out, outF) + + if args.makePlot: + fig,(ax,rax) = plt.subplots(2,1, figsize=(6,6), sharex=True) + rax.set_yscale("log", nonposy="clip") + #rax = ax.twinx() + dBinCenters = .5*(mcPUBins[:-1]+mcPUBins[1:]) + nBinCenters = .5*(nomBins[:-1]+nomBins[1:]) + rBinCenters = .5*(ratioBins[:-1]+ratioBins[1:]) + ax.hist(dBinCenters, bins=mcPUBins, weights=mcPUVals, histtype="step", label="MC") + ax.hist(nBinCenters, bins=nomBins, weights=nomCont, histtype="step", label="Nominal", color="k") + rax.hist(rBinCenters, bins=ratioBins, weights=nomRatio, histtype="step", color="k") + if upCont is not None: + ax.hist(nBinCenters, bins=nomBins, weights=upCont, histtype="step", label="Up", color="r") + ax.hist(nBinCenters, bins=nomBins, weights=downCont, histtype="step", label="Down", color="b") + rax.hist(rBinCenters, bins=ratioBins, weights=upRatio, histtype="step", color="r") + rax.hist(rBinCenters, bins=ratioBins, weights=downRatio, histtype="step", color="b") + rax.axhline(1.) + ax.legend() + rax.set_ylim(.02, 2.) + rax.set_xlim(ratioBins[0], ratioBins[-1]) + if args.mcfiles: + rax.set_xlabel(args.mcreweightvar) + elif args.mcprofile: + ax.set_title(args.mcprofile) + if args.output.endswith(".json"): + plt.savefig(args.output.replace(".json", ".png")) + +if __name__ == "__main__": + main() diff --git a/jsonpog-integration/misc/LUM/puWeightsJSONUL.sh b/jsonpog-integration/misc/LUM/puWeightsJSONUL.sh new file mode 100755 index 0000000000000000000000000000000000000000..deb1a97943ee7e5bb961e6ec3a9e8d38cabbff2b --- /dev/null +++ b/jsonpog-integration/misc/LUM/puWeightsJSONUL.sh @@ -0,0 +1,14 @@ +scriptName="makePUReWeightJSON.py" + +python "${scriptName}" --nominal=/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions16/13TeV/PileUp/UltraLegacy/PileupHistogram-goldenJSON-13tev-2016-preVFP-69200ub-99bins.root --up=/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions16/13TeV/PileUp/UltraLegacy/PileupHistogram-goldenJSON-13tev-2016-preVFP-72400ub-99bins.root --down=/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions16/13TeV/PileUp/UltraLegacy/PileupHistogram-goldenJSON-13tev-2016-preVFP-66000ub-99bins.root --makePlot -o 2016preVFP_UL/puWeights.json --gzip --mcprofile=2016UL_25ns --format=correctionlib --name=Collisions16_UltraLegacy_goldenJSON + +python "${scriptName}" --nominal=/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions16/13TeV/PileUp/UltraLegacy/PileupHistogram-goldenJSON-13tev-2016-postVFP-69200ub-99bins.root --up=/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions16/13TeV/PileUp/UltraLegacy/PileupHistogram-goldenJSON-13tev-2016-postVFP-72400ub-99bins.root --down=/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions16/13TeV/PileUp/UltraLegacy/PileupHistogram-goldenJSON-13tev-2016-postVFP-66000ub-99bins.root --makePlot -o 2016postVFP_UL/puWeights.json --gzip --mcprofile=2016UL_25ns --format=correctionlib --name=Collisions16_UltraLegacy_goldenJSON + +python "${scriptName}" --nominal=/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions17/13TeV/PileUp/UltraLegacy/PileupHistogram-goldenJSON-13tev-2017-69200ub-99bins.root 
--up=/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions17/13TeV/PileUp/UltraLegacy/PileupHistogram-goldenJSON-13tev-2017-72400ub-99bins.root --down=/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions17/13TeV/PileUp/UltraLegacy/PileupHistogram-goldenJSON-13tev-2017-66000ub-99bins.root --makePlot -o 2017_UL/puWeights.json --gzip --mcprofile=2017UL_25ns --format=correctionlib --name=Collisions17_UltraLegacy_goldenJSON + +python "${scriptName}" --nominal=/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions18/13TeV/PileUp/UltraLegacy/PileupHistogram-goldenJSON-13tev-2018-69200ub-99bins.root --up=/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions18/13TeV/PileUp/UltraLegacy/PileupHistogram-goldenJSON-13tev-2018-72400ub-99bins.root --down=/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions18/13TeV/PileUp/UltraLegacy/PileupHistogram-goldenJSON-13tev-2018-66000ub-99bins.root --makePlot -o 2018_UL/puWeights.json --gzip --mcprofile=2018UL_25ns --format=correctionlib --name=Collisions18_UltraLegacy_goldenJSON + +for file in */puWeights.json.gz +do + mv $file ../../POG/LUM/$file +done diff --git a/jsonpog-integration/script/.gitkeep b/jsonpog-integration/script/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/jsonpog-integration/script/compareFiles.py b/jsonpog-integration/script/compareFiles.py new file mode 100755 index 0000000000000000000000000000000000000000..bb3b791f536c39771d718ac48a0ebf4a3160eee8 --- /dev/null +++ b/jsonpog-integration/script/compareFiles.py @@ -0,0 +1,131 @@ +#!/usr/bin/env python + +import argparse +import os +import json + +import correctionlib +import correctionlib.schemav2 + +from rich.console import Console +from rich.columns import Columns + + +class Report: + def __init__(self): + self.corr_added = list() # added corrections + self.corr_removed = set() # (name, version) of removed corrections + self.corr_updated = list() # (old, new) for modified corrections + self.corr_version_error = set() # (name, version) of modified corrections but where the version number wasn't bumped + + def generate(self, files): + # Some ugly mix of markdown for gitlab and rich console output... 
+ + + def _print_cv(lst, msg=""): + for c,v in lst: + console.print(f" * `{c}` -> version `{v}`{msg}") + + console = Console(width=100, color_system=None) + + console.print(f"\n### Comparison: file {files[1]}\n") + console.print(f"Old file: `{files[0]}`\n") + + if len(self.corr_version_error): + console.print("\n#### Version errors\n") + console.print("These corrections should increase their version numbers:\n") + _print_cv(self.corr_version_error, msg=" is already in use") + + if len(self.corr_removed): + console.print("\n#### Removed corrections\n") + _print_cv(self.corr_removed) + + if len(self.corr_added): + console.print("\n#### Added corrections\n") + for added in self.corr_added: + console.print(added) + + if len(self.corr_updated): + console.print("\n#### Updated corrections\n") + for old, new in self.corr_updated: + console.print(Columns([old, new], width=40, equal=True, expand=True)) + + if len(self.corr_version_error) + \ + len(self.corr_removed) + \ + len(self.corr_added) + \ + len(self.corr_updated) == 0: + console.print("No significant difference in the corrections") + + def status_code(self): + if len(self.corr_version_error): + return 1 + return 0 + + +def compare_corrections(c1, c2): + """Return False if the two corrections differ in their content + Differences in name, version or description are not considered""" + + for key in ["inputs", "output", "data", "generic_formulas"]: + if c1.get(key) != c2.get(key): + return False + return True + + +def compare_files(old, new): + corrs_old = old["corrections"] + names_vers_old = { (c["name"], c["version"]) for c in corrs_old } + names_old = { nv[0] for nv in names_vers_old } + corrs_new = new["corrections"] + names_vers_new = { (c["name"], c["version"]) for c in corrs_new } + + report = Report() + # note: assign to the attribute defined in Report.__init__ (corr_removed), otherwise removed corrections are never reported + report.corr_removed = names_vers_old - names_vers_new + + for corr in corrs_new: + name = corr["name"] + vers = corr["version"] + + # new correction name, not present in old file + if name not in names_old: + report.corr_added.append(correctionlib.schemav2.Correction.parse_obj(corr)) + continue + + # we know there is a correction with the same name in the old file + corrs_old_same_name = [ c for c in corrs_old if c["name"] == name ] + + # -> check if there is one with the same version + old_same_version = next((c for c in corrs_old_same_name if c["version"] == vers), None) + + if old_same_version: + # same version but different content -> problem! + if not compare_corrections(old_same_version, corr): + report.corr_version_error.add((name, vers)) + continue + + # different version -> check the version increased + old_max_version = max(corrs_old_same_name, key=lambda c: c["version"]) + if old_max_version["version"] > vers: + report.corr_version_error.add((name, vers)) + else: + report.corr_updated.append( + (correctionlib.schemav2.Correction.parse_obj(old_max_version), + correctionlib.schemav2.Correction.parse_obj(corr))) + + return report + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Compares the content of two correction JSON files. 
Assumes their content are valid schemav2.") + parser.add_argument("files", nargs=2, help="Two json files to compare: old and new") + args = parser.parse_args() + + def json_load(path): + data = correctionlib.highlevel.open_auto(path) + return json.loads(data) + files = map(json_load, args.files) + + report = compare_files(*files) + report.generate(args.files) + + exit(report.status_code()) + diff --git a/jsonpog-integration/script/fancyTable/LICENSE b/jsonpog-integration/script/fancyTable/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..a8c0a5493a2409679064378d387848a2b9a5d535 --- /dev/null +++ b/jsonpog-integration/script/fancyTable/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 Johan Johansson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/jsonpog-integration/script/fancyTable/fancyTable.min.js b/jsonpog-integration/script/fancyTable/fancyTable.min.js new file mode 100644 index 0000000000000000000000000000000000000000..5a912f1627f325a45dfa3a7aef1d1cc2c1e138dd --- /dev/null +++ b/jsonpog-integration/script/fancyTable/fancyTable.min.js @@ -0,0 +1,9 @@ +/*! 
+ * jQuery fancyTable plugin v1.0.33 + * https://github.com/myspace-nu + * + * Copyright 2018 Johan Johansson + * Released under the MIT license + */ + +!function(i){i.fn.fancyTable=function(a){var o=i.extend({inputStyle:"",inputPlaceholder:"Search...",pagination:!1,paginationClass:"btn btn-light",paginationClassActive:"active",pagClosest:3,perPage:10,sortable:!0,searchable:!0,matchCase:!1,exactMatch:!1,localeCompare:!1,onInit:function(){},onUpdate:function(){},sortFunction:function(a,e,t,n,r){return a==e&&n&&r?t.rowSortOrder[i(n).data("rowid")]>t.rowSortOrder[i(r).data("rowid")]:"numeric"==t.sortAs[t.sortColumn]?0<t.sortOrder?parseFloat(a)-parseFloat(e):parseFloat(e)-parseFloat(a):o.localeCompare?a.localeCompare(e)<0?-t.sortOrder:0<a.localeCompare(e)?t.sortOrder:0:a<e?-t.sortOrder:e<a?t.sortOrder:0},testing:!1},a),l=this;return this.settings=o,this.tableUpdate=function(n){if(n.fancyTable.matches=0,i(n).find("tbody tr").each(function(){var a=0,e=!0,t=!1;i(this).find("td").each(function(){o.globalSearch||!n.fancyTable.searchArr[a]||l.isSearchMatch(i(this).html(),n.fancyTable.searchArr[a])?!o.globalSearch||n.fancyTable.search&&!l.isSearchMatch(i(this).html(),n.fancyTable.search)||Array.isArray(o.globalSearchExcludeColumns)&&o.globalSearchExcludeColumns.includes(a+1)||(t=!0):e=!1,a++}),o.globalSearch&&t||!o.globalSearch&&e?(n.fancyTable.matches++,!o.pagination||n.fancyTable.matches>n.fancyTable.perPage*(n.fancyTable.page-1)&&n.fancyTable.matches<=n.fancyTable.perPage*n.fancyTable.page?i(this).show():i(this).hide()):i(this).hide()}),n.fancyTable.pages=Math.ceil(n.fancyTable.matches/n.fancyTable.perPage),o.pagination){var a=n.fancyTable.paginationElement?i(n.fancyTable.paginationElement):i(n).find(".pag");a.empty();for(var e,t=1;t<=n.fancyTable.pages;t++)(1==t||t>n.fancyTable.page-(o.pagClosest+1)&&t<n.fancyTable.page+(o.pagClosest+1)||t==n.fancyTable.pages)&&(e=i("<a>",{html:t,"data-n":t,style:"margin:0.2em",class:o.paginationClass+" "+(t==n.fancyTable.page?o.paginationClassActive:"")}).css("cursor","pointer").bind("click",function(){n.fancyTable.page=i(this).data("n"),l.tableUpdate(n)}),t==n.fancyTable.pages&&n.fancyTable.page<n.fancyTable.pages-o.pagClosest-1&&a.append(i("<span>...</span>")),a.append(e),1==t&&n.fancyTable.page>o.pagClosest+2&&a.append(i("<span>...</span>")))}o.onUpdate.call(this,n)},this.isSearchMatch=function(a,e){if(o.matchCase||(a=a.toUpperCase(),e=e.toUpperCase()),"auto"==o.exactMatch&&e.match(/^".*?"$/))return a==(e=e.substring(1,e.length-1));if("auto"==o.exactMatch&&e.replace(/\s+/g,"").match(/^[<>]=?/)){var t=e.replace(/\s+/g,"").match(/^[<>]=?/)[0],n=e.replace(/\s+/g,"").substring(t.length);return">"==t&&+n<+a||"<"==t&&+a<+n||">="==t&&+n<=+a||"<="==t&&+a<=+n}if("auto"==o.exactMatch&&e.replace(/\s+/g,"").match(/^.+(\.\.|-).+$/)){n=e.replace(/\s+/g,"").split(/\.\.|-/);return+a>=+n[0]&&+a<=+n[1]}try{return!0===o.exactMatch?a==e:new RegExp(e).test(a)}catch{return!1}},this.reinit=function(){i(this).each(function(){i(this).find("th a").contents().unwrap(),i(this).find("tr.fancySearchRow").remove()}),i(this).fancyTable(this.settings)},this.tableSort=function(r){var a,e;void 0!==r.fancyTable.sortColumn&&r.fancyTable.sortColumn<r.fancyTable.nColumns&&(a=0,i(r).find("thead th").each(function(){i(this).attr("aria-sort",a==r.fancyTable.sortColumn?1==r.fancyTable.sortOrder?"ascending":-1==r.fancyTable.sortOrder?"descending":"other":null),a++}),i(r).find("thead th 
div.sortArrow").each(function(){i(this).remove()}),(e=i("<div>",{class:"sortArrow"}).css({margin:"0.1em",display:"inline-block",width:0,height:0,"border-left":"0.4em solid transparent","border-right":"0.4em solid transparent"})).css(0<r.fancyTable.sortOrder?{"border-top":"0.4em solid #000"}:{"border-bottom":"0.4em solid #000"}),i(r).find("thead th a").eq(r.fancyTable.sortColumn).append(e),e=i(r).find("tbody tr").toArray().sort(function(a,e){var t=i(a).find("td").eq(r.fancyTable.sortColumn),n=i(e).find("td").eq(r.fancyTable.sortColumn),t=i(t).attr("data-sortvalue")?i(t).data("sortvalue"):t.html(),n=i(n).attr("data-sortvalue")?i(n).data("sortvalue"):n.html();return"case-insensitive"==r.fancyTable.sortAs[r.fancyTable.sortColumn]&&(t=t.toLowerCase(),n=n.toLowerCase()),o.sortFunction.call(this,t,n,r.fancyTable,a,e)}),i(e).each(function(a){r.fancyTable.rowSortOrder[i(this).data("rowid")]=a}),i(r).find("tbody").empty().append(e))},this.each(function(){if("TABLE"!==i(this).prop("tagName"))return console.warn("fancyTable: Element is not a table."),!0;var e,t,a,n,r,s=this;s.fancyTable={nColumns:i(s).find("td").first().parent().find("td").length,nRows:i(this).find("tbody tr").length,perPage:o.perPage,page:1,pages:0,matches:0,searchArr:[],search:"",sortColumn:o.sortColumn,sortOrder:void 0!==o.sortOrder&&(new RegExp("desc","i").test(o.sortOrder)||-1==o.sortOrder)?-1:1,sortAs:[],paginationElement:o.paginationElement},s.fancyTable.rowSortOrder=new Array(s.fancyTable.nRows),0==i(s).find("tbody").length&&(n=i(s).html(),i(s).empty(),i(s).append("<tbody>").append(i(n))),0==i(s).find("thead").length&&i(s).prepend(i("<thead>")),i(s).find("tbody tr").each(function(a){i(this).data("rowid",a)}),o.sortable&&(e=0,i(s).find("thead th").each(function(){s.fancyTable.sortAs.push("numeric"==i(this).data("sortas")?"numeric":"case-insensitive"==i(this).data("sortas")?"case-insensitive":null);var a=i(this).html(),a=i("<a>",{href:"#","aria-label":"Sort by "+i(this).text(),html:a,"data-n":e,class:""}).css({cursor:"pointer",color:"inherit","text-decoration":"none","white-space":"nowrap"}).bind("click",function(){return s.fancyTable.sortColumn==i(this).data("n")?s.fancyTable.sortOrder=-s.fancyTable.sortOrder:s.fancyTable.sortOrder=1,s.fancyTable.sortColumn=i(this).data("n"),l.tableSort(s),l.tableUpdate(s),!1});i(this).empty(),i(this).append(a),e++})),o.searchable&&(t=i("<tr>").addClass("fancySearchRow"),o.globalSearch?(a=i("<input>",{"aria-label":"Search table",placeholder:o.inputPlaceholder,style:"width:100%;box-sizing:border-box;"+o.inputStyle}).bind("change paste keyup",function(){s.fancyTable.search=i(this).val(),s.fancyTable.page=1,l.tableUpdate(s)}),n=i("<th>",{style:"padding:2px;"}).attr("colspan",s.fancyTable.nColumns),i(a).appendTo(i(n)),i(n).appendTo(i(t))):(r=0,i(s).find("td").first().parent().find("td").each(function(){s.fancyTable.searchArr.push("");var a=i("<input>",{"aria-label":"Search column","data-n":r,placeholder:o.inputPlaceholder,style:"width:100%;box-sizing:border-box;"+o.inputStyle}).bind("change paste keyup",function(){s.fancyTable.searchArr[i(this).data("n")]=i(this).val(),s.fancyTable.page=1,l.tableUpdate(s)}),e=i("<th>",{style:"padding:2px;"});i(a).appendTo(i(e)),i(e).appendTo(i(t)),r++})),t.appendTo(i(s).find("thead"))),l.tableSort(s),o.pagination&&!o.paginationElement&&(i(s).find("tfoot").remove(),i(s).append(i("<tfoot><tr></tr></tfoot>")),i(s).find("tfoot tr").append(i("<td class='pag'></td>",{}).attr("colspan",s.fancyTable.nColumns))),l.tableUpdate(s),o.onInit.call(this,s)}),this}}(jQuery); \ No 
newline at end of file diff --git a/jsonpog-integration/script/generate_html.py b/jsonpog-integration/script/generate_html.py new file mode 100755 index 0000000000000000000000000000000000000000..2d5e57e8e885210fa85ff4fa27fcdc08669527e4 --- /dev/null +++ b/jsonpog-integration/script/generate_html.py @@ -0,0 +1,118 @@ +#!/usr/bin/env python + +import os +import argparse +import pathlib as pl +import re +import pandas as pd +from rich.console import Console +import shutil + +from correctionlib.highlevel import model_auto, open_auto + + +template = \ +"""<!DOCTYPE html> +<html> +<head> + <title>Summary of common POG JSONs</title> + <link href="https://cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/4.1.3/css/bootstrap.min.css" rel="stylesheet"> +</head> +<body> + <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.6.1/jquery.min.js"></script> + <script src="fancyTable/fancyTable.min.js"></script> + + <h3>Summary of common POG JSONs</h3> + + #TABLE# + + <script type="text/javascript"> + $(function() { + $("#jsonTable").fancyTable({ + sortColumn: 0, + pagination: true, + exactMatch: "auto", + perPage: 20 + }); + }); + </script> +</body> +</html> +""" + + +def get_year_from_era(era): + """ Go from '2017'/'2018UL'/'2016ULpreVFP' to '17'/'18'/'16' """ + return re.search(r"20([0-9]+).*", era).group(1) + + +def get_run_from_era(era): + year, camp = era.split("_") + if int(get_year_from_era(year)) <= 18: + return year, camp, "Run2" + else: + return year, camp, "Run3" + + +def generate_json_summary(inPath, outPath): + with open(os.devnull, "w") as devnull: + console = Console(width=100, file=devnull, record=True) + cset = model_auto(open_auto(str(inPath))) + console.print(cset) + console.save_html(str(outPath)) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input", required=True, help="input jsonpog-integration POG folder") + parser.add_argument("-o", "--output", required=True, help="output folder for html pages") + args = parser.parse_args() + + out_dir_p = pl.Path(args.output) + summary_dir_p = out_dir_p / "summaries" + summary_dir_p.mkdir(parents=True, exist_ok=True) + + root_dir_p = pl.Path(args.input) + files = [] + + for pog_dir_p in root_dir_p.iterdir(): + if not pog_dir_p.is_dir(): continue + pog = pog_dir_p.name + + for era_dir_p in pog_dir_p.iterdir(): + if not era_dir_p.is_dir(): continue + + era = era_dir_p.name + year, campaign, run = get_run_from_era(era) + + for json_file_p in era_dir_p.iterdir(): + if json_file_p.suffixes not in [[".json"], [".json", ".gz"]]: continue + file_name = json_file_p.name.split(".")[0] + + summary_file_p = summary_dir_p / f"{pog}_{era}_{file_name}.html" + print(f"Generating HTML summary for {json_file_p}") + generate_json_summary(json_file_p, summary_file_p) + + files.append({ + "POG": pog, + "Era": year, + "Campaign": campaign, + "LHC Run": run, + "File summary": f'<a href="summaries/{summary_file_p.name}" target="_blank">{file_name}</a>' + }) + + print("Generating index page") + + files = pd.DataFrame(files) + + index_p = out_dir_p / "index.html" + index_p.write_text( + template.replace("#TABLE#", + files.to_html(index=False, table_id="jsonTable", escape=False) + ) + ) + + # also install javascript + script_dir_p = pl.Path(__file__).parent / "fancyTable" + shutil.copytree(script_dir_p, out_dir_p / "fancyTable", dirs_exist_ok=True) + diff --git a/jsonpog-integration/script/gitlab_post_comment.py b/jsonpog-integration/script/gitlab_post_comment.py new file mode 100755 index 
0000000000000000000000000000000000000000..748af176cb4f1ece337cbda0cc0a2bbfb0be9e8b --- /dev/null +++ b/jsonpog-integration/script/gitlab_post_comment.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python3 +import os +import sys +from urllib import parse, request + +iid = os.environ['CI_MERGE_REQUEST_IID'] +project = os.environ['CI_MERGE_REQUEST_PROJECT_PATH'].replace("/", "%2F") + +if 'GITLAB_API_TOKEN' in os.environ: + token = os.environ['GITLAB_API_TOKEN']; +else: + token=("".join(open("%s/private/gitlab-token" % os.environ['HOME']))).strip() + +review_body = "" +for f in sys.argv[1:]: + if os.path.isfile(f): + review_body += "".join(open(f)) + "\n" + +blob = parse.urlencode({'body': review_body.replace("\n", "\r\n").replace('"', '\"').encode('utf-8')}) +blob = blob.encode('utf-8') +req = request.Request(f"https://gitlab.cern.ch/api/v4/projects/{project}/merge_requests/{iid}/notes", blob, headers={'PRIVATE-TOKEN': token}) +req.get_method = lambda: 'POST' +response = request.urlopen(req) +data = response.read().decode('utf-8') +print(data) + diff --git a/jsonpog-integration/script/testMR.sh b/jsonpog-integration/script/testMR.sh new file mode 100755 index 0000000000000000000000000000000000000000..70df5c80db71fea55b176cb738418ec94b8b3ed8 --- /dev/null +++ b/jsonpog-integration/script/testMR.sh @@ -0,0 +1,103 @@ +#!/bin/bash + +NOCHANGED_FILES=() +ADDED_FILES=() +CHANGED_FILES=() +BAD_CHANGED_FILES=() +BROKEN_FILES=() + +HEAD=$1 # path to the merge request +MASTER=$2 # path to the clean clone + +echo -e "## Validation errors\n" >> ${SCHEMA_REPORT} +echo -e "## Summary of changes\n" >> ${CHANGE_REPORT} + +STATUS=0 + +# make sure tee'ing commands doesn't hide exit status +set -o pipefail + +function validate() { + VALIDATION=$(correction validate --version 2 $1) + if [[ $? -ne 0 ]]; then + echo + echo "######### VALIDATION ERROR in $1 #########" + echo ${VALIDATION} + echo "################################################" + echo ${VALIDATION} >> ${SCHEMA_REPORT} + BROKEN_FILES+=($1) + STATUS=1 + return 1 + fi + return 0 +} + +for i in $(find ${HEAD}/POG -name "*.json*"); do + echo + if [[ -s ${MASTER}/$i ]]; then + # file already exists in master + if cmp --silent ${MASTER}/$i $i; then + echo "No changes in "$i" wrt cms-nanoAOD/jsonpog-integration.git. " + NOCHANGED_FILES+=($i) + else + echo -e "\nThere are changes in $i wrt cms-nanoAOD/jsonpog-integration.git. " + if validate $i; then + script/compareFiles.py ${MASTER}/$i $i 2>&1 | tee -a ${CHANGE_REPORT} + if [[ $? 
-ne 0 ]]; then + BAD_CHANGED_FILES+=($i) + STATUS=1 + else + CHANGED_FILES+=($i) + fi + fi + fi + else + echo "New file found in $i" + if validate $i; then + echo "-------------- summary of new file -----------------------------------" + echo -e "### New valid file was added: $i\n" >> ${CHANGE_REPORT} + correction summary $i | tee -a ${CHANGE_REPORT} + echo "----------------------------------------------------------------------------" + ADDED_FILES+=($i) + fi + fi +done + +function print_array { + printf '`' + local d='`, `' + local f=${1-} + if shift 1; then + printf %s "$f" "${@/#/$d}" + fi + printf '`' +} + +echo + +echo -e "### Summary of changes\n" | tee -a ${SUMMARY_REPORT} + +if (( ${#BAD_CHANGED_FILES[@]} )); then + echo " * Files changed and schema OK but problems found with content, see \`${CHANGE_REPORT}\`: $(print_array ${BAD_CHANGED_FILES[@]})" | tee -a ${SUMMARY_REPORT} +fi +if (( ${#CHANGED_FILES[@]} )); then + echo " * Files changed (tests passed), see \`${CHANGE_REPORT}\`: $(print_array ${CHANGED_FILES[@]})" | tee -a ${SUMMARY_REPORT} +else + echo " * No existing file changed (that passed schema test)." | tee -a ${SUMMARY_REPORT} +fi + +if (( ${#ADDED_FILES[@]} )); then + echo " * Files added (schema test passed), see \`${CHANGE_REPORT}\`: $(print_array ${ADDED_FILES[@]})" | tee -a ${SUMMARY_REPORT} +else + echo " * No file added (that passed schema test)." | tee -a ${SUMMARY_REPORT} +fi + +if (( ${#BROKEN_FILES[@]} )); then + echo " * Broken files found (schema test failed), see \`${SCHEMA_REPORT}\`: $(print_array ${BROKEN_FILES[@]})" | tee -a ${SUMMARY_REPORT} +else + echo " * No file fails schema test." | tee -a ${SUMMARY_REPORT} +fi + +echo "Done." + +exit ${STATUS} diff --git a/jsonpog-integration/script/validateAll.sh b/jsonpog-integration/script/validateAll.sh new file mode 100755 index 0000000000000000000000000000000000000000..9514b5197fadb8c19957fc10f0fdb655a3967a93 --- /dev/null +++ b/jsonpog-integration/script/validateAll.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +BROKEN_FILES=() + +SOURCE=$1 # path to the root of the repository + +STATUS=0 + +for i in $(find ${SOURCE}/POG -name "*.json*"); do + # Validate files + VALIDATION=$(correction validate --version 2 $i) + if [[ $? -ne 0 ]]; then + echo + echo "######### ERROR in "$i" #########" + echo ${VALIDATION} + echo "#################################################################" + echo + BROKEN_FILES+=($i) + STATUS=1 + fi +done + +if (( ${#BROKEN_FILES[@]} )); then + echo -e "Broken files: ${BROKEN_FILES[@]}\n" +else + echo -e "No broken files.\n" +fi + +echo "Done." + +exit ${STATUS}
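For reference, the puWeights files produced by makePUReWeightJSON.py and installed under POG/LUM/ by puWeightsJSONUL.sh are ordinary correctionlib JSONs; a minimal sketch of reading one back (assuming the 2018 UL output path and the correction name passed via --name above) could look like:

import correctionlib

# Load the gzipped correction set written by makePUReWeightJSON.py (path as used in puWeightsJSONUL.sh).
cset = correctionlib.CorrectionSet.from_file("POG/LUM/2018_UL/puWeights.json.gz")
corr = cset["Collisions18_UltraLegacy_goldenJSON"]

# Inputs follow the schema built above: NumTrueInteractions (real) and the variation key (string).
for syst in ("nominal", "up", "down"):
    print(syst, corr.evaluate(30.0, syst))

The "up" and "down" keys are only present when the --up/--down histograms were supplied, as they are in puWeightsJSONUL.sh.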