diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 304dec8eca4f0763048250d810d1a6745c42fe23..a634766fd584e04b5dd0b6542bc774d8a6c20d97 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,5 +1,5 @@
 ###############################################################################
-# (c) Copyright 2018-2021 CERN for the benefit of the LHCb Collaboration     #
+# (c) Copyright 2018-2022 CERN for the benefit of the LHCb Collaboration     #
 #                                                                             #
 # This software is distributed under the terms of the GNU General Public     #
 # Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING".  #
@@ -8,16 +8,29 @@
 # granted to it by virtue of its status as an Intergovernmental Organization #
 # or submit itself to any jurisdiction.                                       #
 ###############################################################################
+stages:
+  - check
+  - build
+  - docs
+  - deploy
+
 variables:
   TARGET_BRANCH: master
+  # Parameters for downloading nightlies and building DaVinci
+  BINARY_TAG: x86_64_v2-centos7-gcc11-opt
+  LCG_VERSION: 101
+  NO_LBLOGIN: "1"  # prevent lbdocker containers from starting LbLogin/LbEnv
+  LB_NIGHTLY_SLOT: lhcb-master
 
 check-copyright:
+  stage: check
   image: gitlab-registry.cern.ch/ci-tools/ci-worker:cc7
   script:
     - curl -o lb-check-copyright "https://gitlab.cern.ch/lhcb-core/LbDevTools/raw/master/LbDevTools/SourceTools.py?inline=false"
     - python lb-check-copyright origin/${TARGET_BRANCH} --exclude lhcbproject.yml
 
 check-formatting:
+  stage: check
   tags:
     - cvmfs
   image: gitlab-registry.cern.ch/ci-tools/ci-worker:cc7
@@ -37,6 +50,7 @@ check-formatting:
     expire_in: 1 week
 
 python-linting:
+  stage: check
   tags:
     - cvmfs
   script:
@@ -44,6 +58,93 @@ python-linting:
     # Only run the pyflakes linter and a few select pycodestyle errors
     - flake8 --exclude '*.opts.py' --select=F,E71,E9,W1,W6 $(find . -name '*.py')
 
+build:
+  stage: build
+  image: gitlab-registry.cern.ch/lhcb-core/lbdocker/centos7-build:latest
+  tags:
+    - cvmfs
+  script:
+    - . /cvmfs/lhcb.cern.ch/lib/LbEnv.sh
+    - . /cvmfs/lhcbdev.cern.ch/nightlies/${LB_NIGHTLY_SLOT}/latest/setupSearchPath.sh
+    # setupSearchPath.sh only sets CMTPROJECTPATH but we need CMAKE_PREFIX_PATH
+    - export CMAKE_PREFIX_PATH=${CMTPROJECTPATH}:${CMAKE_PREFIX_PATH}
+    - lb-project-init
+    - make CMAKEFLAGS=-DLOKI_BUILD_FUNCTOR_CACHE=OFF
+    - ln -s ./build.$BINARY_TAG/run run
+  # Allow failure as the nightly slot we depend on may have itself failed
+  allow_failure: true
+  artifacts:
+    untracked: true
+    expire_in: 1 hr
+
+build-docs:
+  stage: docs
+  image: gitlab-registry.cern.ch/lhcb-core/lbdocker/centos7-build:latest
+  tags:
+    - cvmfs
+  # Change pip's cache directory so we can cache it
+  variables:
+    PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
+  before_script:
+    # As the previous stage is allowed to fail, skip this stage if the previous
+    # did not complete successfully (i.e. did not create the `run` symlink)
+    - test -L run || { echo "WARNING build job failed, stopping"; exit 77; }
+  script:
+    # First try producing just the graphs (gaudirun.py). If that fails, DaVinci
+    # is likely incompatible with the last nightly, so fail with a warning.
+ - ./run make -C doc graphs || { echo "WARNING build is incompatible with last nightly, stopping"; exit 77; } + #- reasons= + #- ./run python doc/make_functor_docs.py "${CI_COMMIT_SHA:0:8}" "master" > doc/selection/thor_functors_reference.generated.rst || reasons+='ERROR failed functor docs build\n' + - ./run make -C doc linkcheck || reasons+='ERROR failed link check\n' + - ./run make -C doc html || reasons+='ERROR failed html generation\n' + - if [ -n "$reasons" ]; then echo -e $reasons; exit 1; fi + allow_failure: + exit_codes: 77 + artifacts: + untracked: false + expire_in: 30 days + paths: + - doc/_build/html/ + cache: + key: "$CI_JOB_NAME" + paths: + - .cache/pip + +pages: + stage: deploy + needs: [build-docs] + rules: + - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH + script: + - mv doc/_build/html public + artifacts: + untracked: false + expire_in: 30 days + paths: + - public + + # image: gitlab-registry.cern.ch/ci-tools/ci-web-deployer + # rules: + # - if: '$CI_COMMIT_REF_PROTECTED == "true"' + # variables: + # # Need to ensure that the path up to $CI_COMMIT_REF_NAME exists (the + # # deployment script does `mkdir`, not `mkdir -p`) + # EOS_PATH: "/eos/user/m/mamartin/www/davincidocs/$CI_COMMIT_REF_NAME" + # #EOS_PATH: "/eos/project/l/lhcbwebsites/www/projects/moore/$CI_COMMIT_REF_NAME" + # CI_OUTPUT_DIR: "doc/_build/html" + # # Makes sure files on the remote are deleted if missing from the source + # METHOD: "rsync" + # before_script: + # # As the previous stage is allowed to fail, skip this stage if the previous + # # did not complete successfully (i.e. did not create the html directory) + # - test -d $CI_OUTPUT_DIR || { echo "WARNING docs not built, stopping"; exit 77; } + # script: + # #- deploy-eos && echo -e "Deployed at\n\n\thttps://lhcbdoc.web.cern.ch/lhcbdoc/moore/$CI_COMMIT_REF_NAME/index.html" + # - deploy-eos && echo -e "Deployed at\n\n\thttps://mamartin.web.cern.ch/davincidocs/$CI_COMMIT_REF_NAME/index.html" + # allow_failure: + # exit_codes: 77 + + include: - project: 'lhcb-rta/reference-update-bot' file: 'templates/ref-bot-ci-trigger-template.yml' diff --git a/doc/Makefile b/doc/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..17f01049eda490e7a497181508be0c177e2d110b --- /dev/null +++ b/doc/Makefile @@ -0,0 +1,81 @@ +############################################################################### +# (c) Copyright 2022 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. # +############################################################################### +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for SPHINXOPTS. +SPHINXPREFIX = _env.$(BINARY_TAG) +SPHINXOPTS ?= +SPHINXBUILD = $(SPHINXPREFIX)/bin/sphinx-build +SPHINXVERSION = $(SPHINXPREFIX)/bin/sphinx-versioning +RUNENV = PYTHONPATH=$(SPHINXPREFIX):$(PYTHONPATH) +SOURCEDIR = . 
+BUILDDIR = _build +GIT_COMMIT = $(shell git rev-parse --short HEAD) + +GRAPH_SCRIPTS := $(wildcard scripts/*.py) +CONTROL_GRAPH_SOURCES := $(patsubst scripts/%.py, graphviz/%_control_flow.gv, $(GRAPH_SCRIPTS)) +DATA_GRAPH_SOURCES := $(patsubst scripts/%.py, graphviz/%_data_flow.gv, $(GRAPH_SCRIPTS)) + +# Put it first so that "make" without argument is like "make help". +help: $(SPHINXBUILD) + $(RUNENV) $(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +# we install certifi (matching the LCG version) so that we can patch +# cacert.pem (== `python -m certifi`) +$(SPHINXBUILD): requirements.txt Makefile + python -m pip install --disable-pip-version-check --no-warn-conflicts --ignore-installed \ + --target=$(SPHINXPREFIX) -r $< \ + certifi==`python -c 'import certifi; print(certifi.__version__)'` + @touch $@ + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +#html pdf: Makefile graphs functor_docs $(SPHINXBUILD) +html pdf: Makefile graphs $(SPHINXBUILD) + $(RUNENV) $(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) +#$(RUNENV) $(SPHINXVERSION) build -r mamartin-docs . _build/html +#-M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O); + +# versioning +# version: Makefile graphs $(SPHINXBUILD) +# $(RUNENV) $(SPHINXVERSION) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) +# sphinx-versioning build -r doc doc/_build/html + +#linkcheck: Makefile graphs functor_docs $(SPHINXBUILD) +linkcheck: Makefile graphs $(SPHINXBUILD) + cat _certificates/*.crt >> `$(RUNENV) python -m certifi` # install CERN CA certificates + $(RUNENV) $(SPHINXBUILD) -b linkcheck "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +clean: Makefile $(SPHINXBUILD) + rm -rf graphviz + $(RUNENV) $(SPHINXBUILD) -M clean "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +purge: + rm -rf $(SPHINXPREFIX) $(BUILDDIR) graphviz + +graphs: $(DATA_GRAPH_SOURCES) $(CONTROL_GRAPH_SOURCES) + +# # Generate graphs with names based on the options file that creates them +# graphviz/%_data_flow.gv graphviz/%_control_flow.gv: scripts/%.py +# mkdir -p graphviz +# gaudirun.py -n $< +# mv data_flow.gv graphviz/$*_data_flow.gv +# mv control_flow.gv graphviz/$*_control_flow.gv + +#functor_docs: selection/thor_functors_reference.generated.rst + +# selection/thor_functors_reference.generated.rst: make_functor_docs.py +# python make_functor_docs.py "$(GIT_COMMIT)" "master" > "$@" + +#.PHONY: help html pdf clean graphs functor_docs purge +.PHONY: help html pdf clean graphs purge diff --git a/doc/_certificates/CERN Grid Certification Authority.crt b/doc/_certificates/CERN Grid Certification Authority.crt new file mode 100644 index 0000000000000000000000000000000000000000..84605e95f79ec8724a6ad6fd46c61b1971276375 --- /dev/null +++ b/doc/_certificates/CERN Grid Certification Authority.crt @@ -0,0 +1,53 @@ +-----BEGIN CERTIFICATE----- +MIIJdjCCB16gAwIBAgIKYZhqPwAAAAAAAzANBgkqhkiG9w0BAQ0FADBKMQswCQYD +VQQGEwJjaDENMAsGA1UEChMEQ0VSTjEsMCoGA1UEAxMjQ0VSTiBSb290IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5IDIwHhcNMTMwNDIyMTExMDE2WhcNMjMwNDIyMTEy +MDE2WjBWMRIwEAYKCZImiZPyLGQBGRYCY2gxFDASBgoJkiaJk/IsZAEZFgRjZXJu +MSowKAYDVQQDEyFDRVJOIEdyaWQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDS9Ypy1csm0aZA4/QnWe2oaiQI +LqfeekV8kSSvOhW2peo5cLNIKbXATOo1l2iwIbCWV8SRU2TLKxHIL8fAOJud5n9K +mEKBew7nzubl1wG93B4dY0KREdb3/QB/7OkG8ZZvLqrvQZVGT1CgJ+NFFUiJ315D +FWkKctZv27LjQamzCxpX+gZSsmwZmSReY67cnm6P7z+/3xVNhwb+4Z+1Ww4vHhMc 
+dh1Dsrkv9vXU01UN752QtQ6l56uQLYEB2+vaHB6IpyC9zAQ/33GulCq8Gbj7ykPd +9AcRVBeJAErSK+oMHThtdLD7mhTkZivakaNe4O1EhPFH0rWwV45IFN7ipELA5qDx +djdzo6JtLJQMaSV/TV+amEf2CaKlD0giqGhjfSNiOX5HCmpqV14kbl+7Qho6ykZy +b1DGpf70yILnX+AUtdpd8lulTu1yg1Bg5cFQskUIk5+s4nsC1VpmeNxYaeFEcYZj +Ph2mdD7zLo889MtF7kZv7+6J6p4NBL3fQ9Os8/h8XVlfDatzbpVH4jYKKAd4nwJb +knJaKPE0LzLzVfJBwnDxqe8hb64gI8Frludp+jaOYzvMqlzAe9z4a9971iXIWaaG +unbAoEkXj69y7MsvCjWXB7o9HdBaS9FL+ZtXTKCyXl+XLFseYQoQburKr+eTcRed +KLJNj4tRF1799PO69wIDAQABo4IEUDCCBEwwEAYJKwYBBAGCNxUBBAMCAQAwHQYD +VR0OBBYEFKWg/WZY/bndeuGynZ+j0eVQGJTnMIIBLQYDVR0gBIIBJDCCASAwggEc +BgorBgEEAWAKBAEBMIIBDDCBvgYIKwYBBQUHAgIwgbEega4AQwBFAFIATgAgAEcA +cgBpAGQAIABDAGUAcgB0AGkAZgBpAGMAYQB0AGkAbwBuACAAQQB1AHQAaABvAHIA +aQB0AHkAIABDAGUAcgB0AGkAZgBpAGMAYQB0AGUAIABQAG8AbABpAGMAeQAgAGEA +bgBkACAAQwBlAHIAdABpAGYAaQBjAGEAdABlACAAUAByAGEAYwB0AGkAYwBlACAA +UwB0AGEAdABlAG0AZQBuAHQwSQYIKwYBBQUHAgEWPWh0dHA6Ly9jYWZpbGVzLmNl +cm4uY2gvY2FmaWxlcy9jcC1jcHMvY2Vybi1ncmlkLWNhLWNwLWNwcy5wZGYwGQYJ +KwYBBAGCNxQCBAweCgBTAHUAYgBDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHwYDVR0jBBgwFoAU+nv73psNo/JSt2ztWw8Loqam+AQwggFEBgNVHR8E +ggE7MIIBNzCCATOgggEvoIIBK4ZSaHR0cDovL2NhZmlsZXMuY2Vybi5jaC9jYWZp +bGVzL2NybC9DRVJOJTIwUm9vdCUyMENlcnRpZmljYXRpb24lMjBBdXRob3JpdHkl +MjAyLmNybIaB1GxkYXA6Ly8vQ049Q0VSTiUyMFJvb3QlMjBDZXJ0aWZpY2F0aW9u +JTIwQXV0aG9yaXR5JTIwMixDTj1DRVJOUEtJUk9PVDAyLENOPUNEUCxDTj1QdWJs +aWMlMjBLZXklMjBTZXJ2aWNlcyxDTj1TZXJ2aWNlcyxDTj1Db25maWd1cmF0aW9u +LERDPWNlcm4sREM9Y2g/Y2VydGlmaWNhdGVSZXZvY2F0aW9uTGlzdD9iYXNlP29i +amVjdENsYXNzPWNSTERpc3RyaWJ1dGlvblBvaW50MIIBRAYIKwYBBQUHAQEEggE2 +MIIBMjBnBggrBgEFBQcwAoZbaHR0cDovL2NhZmlsZXMuY2Vybi5jaC9jYWZpbGVz +L2NlcnRpZmljYXRlcy9DRVJOJTIwUm9vdCUyMENlcnRpZmljYXRpb24lMjBBdXRo +b3JpdHklMjAyLmNydDCBxgYIKwYBBQUHMAKGgblsZGFwOi8vL0NOPUNFUk4lMjBS +b290JTIwQ2VydGlmaWNhdGlvbiUyMEF1dGhvcml0eSUyMDIsQ049QUlBLENOPVB1 +YmxpYyUyMEtleSUyMFNlcnZpY2VzLENOPVNlcnZpY2VzLENOPUNvbmZpZ3VyYXRp +b24sREM9Y2VybixEQz1jaD9jQUNlcnRpZmljYXRlP2Jhc2U/b2JqZWN0Q2xhc3M9 +Y2VydGlmaWNhdGlvbkF1dGhvcml0eTANBgkqhkiG9w0BAQ0FAAOCAgEAQjzXhTV8 +d+6HaLqSnp7k9whxK6E75BZQJNR2Q/rslhhwijs6nekBjb+JPgmM6M0a7ra+D1Oi +4wKaWiCvU9yleZZSqfEkRl7WK9trRYXHqkqVSnmwNJNsediqioBBDHn/ZMnyc25Z +OLbM+99Z+awvoMbyPy0moUrR7ZqKi3C02N2mkiidO0m3bYnXKwxDUvka5n06oLnI +YSZfwFNJ7IEvSSF4mEzdDeQI+A+87+deb5XOTXee8i1ZUyI08Cg6tuZ8W6NdvY7t ++5iNxRmZJ6DBVwrvXutz0JSqklBCw267osEpX0AKGSL9fE2yGlWBX8WfDLB43lVE +z/HP7kQwYEmsfnfT2yTLzkMJrHSeR0Zymm/oB3amZziKex4kGk+/v7yV1pSYKJce +9QDZE+LYio/ndz01sejMPS87prYJqnII5hDYUjg9F1CoaejhjOlpmCU/10wyEVN0 +nhSP9Wc5z0+lhzU5C1A9r1gXQMuqCA2e7Cv5wv+r9dS+12Uly52jwmYf8mm6H0ZY +LZQbvMayHebD4WCnB7HNdp2Va4z5JrLvwG3J1EXfTjWiPhqOweevOg0rc6t2yhkM +iB9RXMlFoFzbsuE/4Z4Hd0GQcDijcnWJ/VbT15OD2C16yyBiLvu88nXX1gKuOzxL +vu4cw9FOuQZo147y9KPelpUT/SO+nrePzVs= +-----END CERTIFICATE----- diff --git a/doc/_certificates/CERN Root Certification Authority 2.crt b/doc/_certificates/CERN Root Certification Authority 2.crt new file mode 100644 index 0000000000000000000000000000000000000000..646831be5f3bea5de97095cab841af30a9c73a45 --- /dev/null +++ b/doc/_certificates/CERN Root Certification Authority 2.crt @@ -0,0 +1,38 @@ +-----BEGIN CERTIFICATE----- +MIIGqTCCBJGgAwIBAgIQAojDcLlcbrhBX0qrEka4mzANBgkqhkiG9w0BAQ0FADBK +MQswCQYDVQQGEwJjaDENMAsGA1UEChMEQ0VSTjEsMCoGA1UEAxMjQ0VSTiBSb290 +IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IDIwHhcNMTMwMzE5MTI1NTM2WhcNMzMw +MzE5MTMwNTM0WjBKMQswCQYDVQQGEwJjaDENMAsGA1UEChMEQ0VSTjEsMCoGA1UE +AxMjQ0VSTiBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IDIwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQDxqYPFW2qVVi3Rw1NKlEf7x70xF+6a8uE/ 
+Tu4ZVQF/K2RXI95QLkYfKItZvy9Az3ib/VlUho5f8fBaqy4n70uwC7+qd3Aq1/xQ
+ysykPCbBBAsOSQQpTlhrMD2V5Ya9zrirphOhutddiqV96zBCyMM+Gz5uYv9u+cm4
+tg1EOmAMGh2UNxfTFNVmXKkk7eFTSC1+zgb28H6nd3xzV27sn9bfOfGh//ZPy5gm
+Qx0Oh/tc6WMreWzRZBQm5SJiK0QOzPv09p5WmdY2WxZoqNTFBDACQO7ysFOktc74
+fPVFX/lmt4jFNSZRIOvvaACI/qlEaAJTR4FHIY9uSMsV8DrtzhI1Ucyv3kqlQpbF
+jDouq44IryA/np4s/124bW+x8+n/v+at/AxPjvHBLiGhB+J38Z6KcJogoDnGzIXR
+S+YUr/vGz34jOmkRuDN5STuuAXzyCKFXaoAm0AwjTziIv3E0jxC1taw6FpKevnd1
+CLsTLAEUiEjzStFkDhd/Hpipc57zmMFY8VYet2wVqSFjnt2REWOVbZlbCiMHmSeD
+u5EuZLiU8xlkiaCfn4A5XZ6X0qprbgDviGJtwxzNvTg7Hn0ziW5/ELryfQXCwZJ+
+FVne8Zu8sbgy/sDkX+pyFuyB4XgiM0eMNkoexIXJaRdlMWDIL5ysiIXQKjhynAv5
+KLHbRjciVwIDAQABo4IBiTCCAYUwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQFMAMB
+Af8wHQYDVR0OBBYEFPp7+96bDaPyUrds7VsPC6KmpvgEMBAGCSsGAQQBgjcVAQQD
+AgEAMIIBMgYDVR0gBIIBKTCCASUwggEhBgorBgEEAWAKBAEBMIIBETCBwgYIKwYB
+BQUHAgIwgbUegbIAQwBFAFIATgAgAFIAbwBvAHQAIABDAGUAcgB0AGkAZgBpAGMA
+YQB0AGkAbwBuACAAQQB1AHQAaABvAHIAaQB0AHkAIAAyACAAQwBlAHIAdABpAGYA
+aQBjAGEAdABlACAAUABvAGwAaQBjAHkAIABhAG4AZAAgAEMAZQByAHQAaQBmAGkA
+YwBhAHQAZQAgAFAAcgBhAGMAdABpAGMAZQAgAFMAdABhAHQAZQBtAGUAbgB0MEoG
+CCsGAQUFBwIBFj5odHRwOi8vY2FmaWxlcy5jZXJuLmNoL2NhZmlsZXMvY3AtY3Bz
+L2Nlcm4tcm9vdC1jYTItY3AtY3BzLnBkZjANBgkqhkiG9w0BAQ0FAAOCAgEAo0Px
+l4CZ6C6bDH+b6jV5uUO0NIHtvLuVgQLMdKVHtQ2UaxeIrWwD+Kz1FyJCHTRXrCvE
+OFOca9SEYK2XrbqZGvRKdDRsq+XYts6aCampXj5ahh6r4oQJ8U7aLVfziKTK13Gy
+dYFoAUeUrlNklICt3v2wWBaa1tg2oSlU2g4iCg9kYpRnIW3VKSrVsdVk2lUa4EXs
+nTEJ30OS7rqX3SdqZp8G+awtBEReh2XPhRgJ6w3xiScP/UdWYUam2LflCGX3RibB
+/DZhgGHRRoE4/D0kQMP2XTz6cClbNklECTlp0qZIbiaf350HbcDEFzYRSSIi0emv
+kRGcMgsi8yTTU87q8Cr4hETxAF3ZbSVNC0ZaTZ8RBbM9BXguhYzKkVBgG/cMpUjs
+B6tY2HMZbAZ3TKQRb/bRyUigM9DniKWeXkeL/0Nsno+XbcpAqLjtVIRwCg6jTLUi
+1NRsl3BP6C824dVaoI8Ry7m+o6O+mtocw4BMhHfTcoWCO8CWjT0ME67JzaAYa5eM
++OqoWtgbgweBlfO0/3GMnVGMAmI4FlhH2oWKWQgWdgr0Wgh9K05VcxSpJ87/zjhb
+MQn/bEojWmp6eUppPaqNFcELvud41qoe6hLsOYQVUQ1sHi7n6ouhg4BAbwS2iyD2
+uiA6FHTCeLreFGUzs5osPKiz3GE5D6V9she9xIQ=
+-----END CERTIFICATE-----
diff --git a/doc/_ext/graphviz_linked.py b/doc/_ext/graphviz_linked.py
new file mode 100644
index 0000000000000000000000000000000000000000..35251b6d64e5591bbbe7e474194367d4f3e5710f
--- /dev/null
+++ b/doc/_ext/graphviz_linked.py
@@ -0,0 +1,68 @@
+###############################################################################
+# (c) Copyright 2019-2022 CERN for the benefit of the LHCb Collaboration     #
+#                                                                             #
+# This software is distributed under the terms of the GNU General Public     #
+# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING".  #
+#                                                                             #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization #
+# or submit itself to any jurisdiction.                                       #
+###############################################################################
+"""Sphinx extension that wraps sphinx.ext.graphviz HTML output.
+
+The HTML translation handler of sphinx.ext.graphviz is replaced with a wrapper
+`html_visit_graphviz_wrapped`. This wraps the HTML produced by
+`html_visit_graphviz` with an anchor tag that links to the source of the
+original `img`. This is useful if the image is too large to be seen clearly
+when embedded in the page.
+
+This extension must be specified in the configuration `extensions` list *after*
+sphinx.ext.graphviz.
+""" +import logging +import re + +from docutils import nodes +from sphinx.ext.graphviz import html_visit_graphviz + +logger = logging.getLogger(__name__) + + +def html_visit_graphviz_wrapped(self, *args, **kwargs): + format = self.builder.config.graphviz_output_format + if format != "png": + # This raises nodes.SkipNode, so execution will stop here + html_visit_graphviz(self, *args, **kwargs) + + anchor_index = len(self.body) + anchor_tmp = "REPLACE_ME" + self.body.append('<a href="{}">'.format(anchor_tmp)) + try: + html_visit_graphviz(self, *args, **kwargs) + except nodes.SkipNode: + # Catch the exception so we can extract and insert the image URL and + # append the anchor ending before Sphinx continues processing + src = "" + for line in self.body: + if 'src="' in line: + src, = re.match(".*src=[\"']([^\'\"]+)", line).groups() + + if src: + self.body[anchor_index] = self.body[anchor_index].replace( + anchor_tmp, src) + else: + logger.warning("Could not find Graphviz URL for injection.") + self.body.append("</a>") + + raise nodes.SkipNode + + +def setup(app): + app.registry.translation_handlers["html"]["graphviz"] = ( + html_visit_graphviz_wrapped, None) + + return { + "version": "1.0", + "parallel_read_safe": True, + "parallel_write_safe": True, + } diff --git a/doc/conf.py b/doc/conf.py new file mode 100644 index 0000000000000000000000000000000000000000..b4b1d0482b4d1b48ce2635f38990981068de8fb8 --- /dev/null +++ b/doc/conf.py @@ -0,0 +1,201 @@ +############################################################################### +# (c) Copyright 2022 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. # +############################################################################### +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. + +import datetime +import os +import sys + +sys.path.append(os.path.abspath("./_ext")) + +# -- Project information ----------------------------------------------------- + +project = "DaVinci" +year = datetime.date.today().strftime("%Y") +copyright = f"2022-{year}, LHCb Collaboration" +author = "LHCb Collaboration" + +# -- General configuration --------------------------------------------------- + +master_doc = "index" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [
+    "sphinx_rtd_theme",
+    "sphinx.ext.autodoc",
+    "sphinx.ext.napoleon",
+    "sphinx.ext.viewcode",
+    "sphinx.ext.graphviz",
+    "sphinx.ext.todo",
+    "graphviz_linked",
+]
+
+# Assume unmarked references (in backticks) refer to Python objects
+default_role = "py:obj"
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This pattern also affects html_static_path and html_extra_path.
+exclude_patterns = [
+    "_*",
+    "Thumbs.db",
+    ".DS_Store",
+    #"make_functor_docs.py",
+    #"selection/thor_functors_reference.generated.rst",
+]
+
+# -- Options for HTML output -------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+#
+html_theme = "sphinx_rtd_theme"
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ["_static"]
+
+# Global file metadata
+html_context = {
+    "display_gitlab": True,
+    "gitlab_host": "gitlab.cern.ch",
+    "gitlab_user": "lhcb",
+    "gitlab_repo": "DaVinci",
+    "gitlab_version": "master/doc/",
+}
+
+# Napoleon settings
+napoleon_google_docstring = True
+napoleon_numpy_docstring = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+add_module_names = False
+
+# A list of regular expressions that match URIs that should not be
+# checked when doing a linkcheck build.
+linkcheck_ignore = [
+    # egroup links will give 403
+    r"https://groups\.cern\.ch/group/lhcb-rta-selections/default\.aspx",
+    # really broken, see gaudi/Gaudi#156
+    r"http://gaudi\.web\.cern\.ch/gaudi/doxygen/master/index\.html",
+]
+
+# THE CODE BELOW CAN BE USED TO GENERATE HTML VERSIONING OF THE DOCS
+# COMMENTED TO TEST THE DEPLOYMENT OF THE WEBSITE
+
+# ############################
+# # SETUP THE RTD LOWER-LEFT #
+# ############################
+# try:
+#     html_context
+# except NameError:
+#     html_context = dict()
+# html_context['display_lower_left'] = True
+
+# templates_path = ['_templates']
+
+# if 'REPO_NAME' in os.environ:
+#     REPO_NAME = os.environ['REPO_NAME']
+# else:
+#     REPO_NAME = ''
+
+# # SET CURRENT_LANGUAGE
+# if 'current_language' in os.environ:
+#     # get the current_language env var set by buildDocs.sh
+#     current_language = os.environ['current_language']
+# else:
+#     # the user is probably doing `make html`
+#     # set this build's current language to english
+#     current_language = 'en'
+
+# # tell the theme which language we're currently building
+# html_context['current_language'] = current_language
+
+# # SET CURRENT_VERSION
+# from git import Repo
+# repo = Repo(search_parent_directories=True)
+
+# if 'current_version' in os.environ:
+#     # get the current_version env var set by buildDocs.sh
+#     current_version = os.environ['current_version']
+# else:
+#     # the user is probably doing `make html`
+#     # set this build's current version by looking at the branch
+#     current_version = repo.active_branch.name
+
+# # tell the theme which version we're currently on ('current_version' affects
+# # the lower-left rtd menu and 'version' affects the logo-area version)
+# html_context['current_version'] = current_version
+# html_context['version'] = current_version
+
+# # # POPULATE LINKS TO OTHER LANGUAGES COULD BE ADDED IF NEEDED
+# # html_context['languages'] = [
+# #     ('en', '/' + REPO_NAME + '/en/' + current_version + '/')
+# # ]
+
+# # if os.path.isdir('locales'):
+# #     languages = [lang.name for lang in os.scandir('locales') if lang.is_dir()]
+# #     for lang in languages:
+# #         html_context['languages'].append(
+# #             (lang, '/' + REPO_NAME + '/' + lang + '/' + current_version + '/'))
+
+# # POPULATE LINKS TO OTHER VERSIONS
+# html_context['versions'] = list()
+
+# versions = [branch.name for branch in repo.branches]
+# for version in versions:
+#     html_context['versions'].append(
+#         (version,
+#          '/' + REPO_NAME + '/' + current_language + '/' + version + '/'))
+
+# # POPULATE LINKS TO OTHER FORMATS/DOWNLOADS
+# #from https://tech.michaelaltfield.net/2020/07/23/sphinx-rtd-github-pages-2/
+
+# # THE CODE BELOW COULD BE USED TO ADD LINKS TO PDF OR EPUB VERSIONS OF THE DOCUMENTATION
+# # # settings for creating PDF with rinoh
+# # rinoh_documents = [(
+# #     master_doc,
+# #     'target',
+# #     project + ' Documentation',
+# #     '(c) ' + copyright,
+# # )]
+# # today_fmt = "%B %d, %Y"
+
+# # # settings for EPUB
+# # epub_basename = 'target'
+
+# # html_context['downloads'] = list()
+# # html_context['downloads'].append(
+# #     ('pdf',
+# #      '/' + REPO_NAME + '/' + current_language + '/' + current_version + '/' +
+# #      project + '-docs_' + current_language + '_' + current_version + '.pdf'))
+
+# # html_context['downloads'].append(
+# #     ('epub',
+# #      '/' + REPO_NAME + '/' + current_language + '/' + current_version + '/' +
+# #      project + '-docs_' + current_language + '_' + current_version + '.epub'))
diff --git a/doc/configuration/davinci_configuration.rst b/doc/configuration/davinci_configuration.rst
new file mode 100644
index 0000000000000000000000000000000000000000..1edb38a8af7d8067bcdd1474e473b2cd79f42304
--- /dev/null
+++ b/doc/configuration/davinci_configuration.rst
@@ -0,0 +1,402 @@
+DaVinci Configuration
+=====================
+
+From ``DaVinci/v53r0`` the DaVinci configuration has been modernized and revisited in order to improve
+user accessibility and to hide all the technicalities the user does not need to deal with.
+The two major changes with respect to the old configuration are the general PyConf-like
+structure and the use of the Click package to handle the job context and ensure that all the
+job arguments are passed correctly to Gaudi.
+
+The main scripts based on the Click package are implemented in
+`this file <https://gitlab.cern.ch/lhcb/DaVinci/-/blob/master/DaVinciSys/scripts/davinci>`__,
+while all the other files related to the configuration itself are implemented in
+`this directory <https://gitlab.cern.ch/lhcb/DaVinci/-/blob/master/Phys/DaVinci/python/DaVinci/>`__.
+
+The DaVinci application can be run by means of two commands: the first exploits the new Click-based script
+::
+
+    ./run davinci [davinci_option] command [command_option] [extra_options]
+
+while the second is the more compact command using the ``gaudirun.py`` script, developed for running DaVinci jobs on the Grid via DIRAC,
+::
+
+    ./run gaudirun.py [options_file.py]
+
+Additional details on how to configure the DaVinci options with the two methods can be found in the following paragraphs.
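+
+For example, a Click-based configuration can be exported to a self-contained options file with the ``--export`` flag described below, and then re-run with ``gaudirun.py`` (the file name here is just a placeholder):
+::
+
+    ./run davinci --export myJob.opts [command] [command_option]
+    ./run gaudirun.py myJob.opts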
+
+Click-based scripts
+-------------------
+The ``main`` function,
+defined in `this file <https://gitlab.cern.ch/lhcb/DaVinci/-/blob/master/DaVinciSys/scripts/davinci>`__ using the Click package,
+takes a few input arguments to allow the user to run the job in any scenario (``davinci_options``):
+
+  1. ``--export``: a flag for exporting the options in a format suitable for gaudirun.py; a filename (with extension `.opts`) has to be provided as an argument
+  2. ``--with-defaults``: a flag that explicitly includes default values of properties in the final configuration
+  3. ``--dry-run``: a flag for processing the options without running the application
+
+When called, this function invokes the ``resultcallback`` method, which in this case is overridden by the function ``run_job``.
+Help can be obtained by running ``./run davinci --help``.
+::
+
+    @main.resultcallback()
+    def run_job(configurables, export=None, with_defaults=False, dry_run=None):
+
+``run_job`` instantiates and runs a new Gaudi application given a list of Gaudi configurables.
+These configurables can be defined both as ``LHCbUserConfigurables`` and ``GaudiConfig2`` configurables,
+since a dedicated implementation allows proper handling and merging of both types.
+This function is implemented in
+`this file <https://gitlab.cern.ch/lhcb/DaVinci/-/blob/master/Phys/DaVinci/python/DaVinci/utilities_script.py>`__.
+
+The individual commands are implemented as functions such as ``run_mc``:
+::
+
+    def run_mc(ctx, inputfiledb, joboptfile, simplejob):
+
+These command functions accept a set of input arguments (``command_options``):
+
+  #. ``--inputfiledb`` (also ``-i``): takes a pair of arguments defining the input files (key, fileDB), allowing DaVinci
+     to run the job with all the files listed in the fileDB database with the specified key.
+
+     * The fileDB file (``.yaml``) will be created along with ntuples in the Analysis Production.
+     * To use the general ``TestFileDB``,
+       defined `here <https://gitlab.cern.ch/lhcb-datapkg/PRConfig/blob/master/python/PRConfig/TestFileDB.py>`__,
+       it is sufficient to put ``fileDB = '-'``.
+     * In addition, all the metadata values reported in the ``fileDB`` will be automatically
+       configured in the DaVinci job, so that the user no longer needs to set them by hand.
+     * A dedicated option flag (``override_data_options``, described below) has to be activated by the users
+       in case they need to overwrite any metadata information.
+
+  #. ``--joboptfile``: option file containing the job information to be set (``.py``, ``.yaml``).
+  #. ``--simplejob``: a flag for running a DaVinci job without any specific configuration (data/mc) (only PyConf).
+
+Click also allows adding any other argument when calling one of these two methods,
+allowing the user to set any DaVinci option from the command line (``extra_options``).
+These extra arguments are saved in the job context and then retrieved during the configuration.
+For example, it is possible to set the maximum number of events to 100 by adding ``--evt_max 100``
+to the command line. The full list of all the DaVinci options that can be set is reported
+`here <https://gitlab.cern.ch/lhcb/DaVinci/-/blob/master/Phys/DaVinci/python/DaVinci/options_default.py>`__,
+along with their default values.
+
+DaVinci Configuration scripts
+-----------------------------
+
+``davinci`` returns a list of configurables to be passed to Gaudi
+by means of the (internally called) ``run_job`` function.
+This list of Configurables is created by calling the ``run_davinci_app``
+function, implemented `here <https://gitlab.cern.ch/lhcb/DaVinci/-/blob/master/Phys/DaVinci/python/DaVinci/ConfigurationUpgrade.py>`__.
+This function takes a few input arguments:
+
+  1. ``fileDB_key``: a key in the ``testfileDB``,
+  2. ``fileDB_file``: a file with the ``testfileDB``,
+  3. ``jobOpt_file``: a file containing the options to be used for running the job,
+  4. ``override_data_options``: a flag for overriding the dataset properties found in the ``testfileDB``,
+  5. ``simplejob``: a flag for running a job using only the ``PyConf`` configurations,
+  6. ``ctx_args``: click context containing all the extra options,
+  7. ``prod_conf``: a flag to be enabled only when running DaVinci with ProdConf.
+
+The ``fileDB_key``, ``fileDB_file`` and ``jobOpt_file`` arguments are meant to pass to DaVinci the options related to the job. In particular, the first two are related to input dataset information, e.g. ``data_type`` or ``simulation``, listed in the ``testfileDB`` file, which can be the TestFileDB or a TestFileDB-like YAML file. The latter argument specifies a .py or .yaml file containing a dictionary with all the job options, e.g. ``evt_max`` or ``enable_unpack``.
+In case the user wants to override any dataset option found in the ``testfileDB``, the new value can be defined in the ``jobOpt_file`` as any other DaVinci property, but the ``override_data_options`` flag has to be activated; otherwise DaVinci will return an error preventing the job from running.
+DaVinci will read the ``testfileDB`` and the ``jobOpt_file`` and store all the values in the ``DaVinci.options`` object, representing an instance of ``PyConf.application.ApplicationOptions``.
+There are also two other methods for setting the property values in DaVinci:
+
+  1. by means of the ``ctx_args`` argument, passing directly the value of the property of interest via the command line,
+     e.g. ``--evt_max 100``,
+  2. in the option file containing the user algorithms to be run in the job, importing ``DaVinci.options`` and
+     setting the values of the properties of interest, e.g.
+     ::
+
+       from DaVinci import options
+       options.evt_max = 100
+
+DaVinci will set the value of the properties following a specific source order:
+
+  1. the `options_default.py <https://gitlab.cern.ch/lhcb/DaVinci/-/blob/master/Phys/DaVinci/python/DaVinci/options_default.py>`__ script,
+  2. the ``fileDB_file`` argument related to the input data of interest,
+  3. the ``jobOpt_file`` argument if specified,
+  4. the ``ctx_args`` argument if passed by command line,
+  5. the user algorithm file if defined by the user.
+
+Finally, DaVinci checks that the final value of each property is valid and finalizes the configuration using the ``configure_input`` and ``configure`` methods developed in the PyConf package,
+configuring the input files with the corresponding CondDB and DDDB tags and all the default tools and algorithms needed by a general LHCb job.
+
+If the DaVinci ``simplejob`` flag is not active,
+DaVinci performs additional steps, adding to the configuration the ``HltANNSvc`` service,
+the unpacking algorithms,
+the event pre-filters
+and the function for writing generator and lumi FSRs.
+The DaVinci control flow is governed by a structure very similar to the one used in ``PyConf``, implemented
+`here <https://gitlab.cern.ch/lhcb/DaVinci/-/blob/master/Phys/DaVinci/python/DaVinci/config.py>`__ and `here <https://gitlab.cern.ch/lhcb/DaVinci/-/blob/master/Phys/DaVinci/python/DaVinci/application.py>`__.
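+
+As an illustration of the source order above (a sketch, using the placeholder database and option files introduced in the next section), a property set in several sources takes its value from the source that comes latest in the list:
+::
+
+    # newOpts.yaml contains: evt_max: 1000
+    ./run davinci --inputfiledb fileKey ./newDB.yaml --joboptfile ./newOpts.yaml --evt_max 100
+    # the command-line (ctx_args) value wins: the job runs over 100 events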
+
+
+How to run a DaVinci job
+------------------------
+
+The DaVinci application can be run via the command line as follows:
+::
+
+    ./run davinci [davinci_option] command [command_option] [extra_options]
+
+The parts of interest are the options you can pass to ``davinci`` and ``command``, which define the configuration of the job to be executed.
+
+
+Then you will need to define a database storing the paths to the input files along with the relevant metadata information, which will be imported automatically by DaVinci.
+You can of course use the general TestFileDB,
+but you can also create a new one similar to
+`this example <https://gitlab.cern.ch/lhcb/DaVinci/-/blob/master/Phys/DaVinci/options/DaVinciDB-Example.yaml>`__
+containing only the files you need.
+The mandatory elements of this database are:
+
+  1. a ``key`` to identify the files
+  2. the ``filenames`` with the path to all your input files
+  3. all the ``qualifiers`` you want to be set in DaVinci
+
+     1. data_type
+     2. input_type
+     3. simulation
+     4. conddb_tag
+     5. dddb_tag
+
+As a minimal example we can create a new database named ``newDB.yaml`` with a unique key ``fileKey``:
+::
+
+    fileKey:
+        filenames:
+            - 'root://eoslhcb.cern.ch//eos/lhcb/grid/prod/lhcb/MC/Upgrade/LDST/00076720/0000/00076720_00000002_1.ldst'
+        qualifiers:
+            data_type: Upgrade
+            input_type: LDST
+            simulation: true
+            conddb_tag: sim-20171127-vc-md100
+            dddb_tag: dddb-20171126
+
+
+The next step is to write an option file containing all the DaVinci options related to the job itself;
+you can find an example `here <https://gitlab.cern.ch/lhcb/DaVinci/-/blob/master/Phys/DaVinci/options/jobOptions-Example.yaml>`__.
+Also in this case we can create an option file named ``newOpts.yaml`` with the following lines:
+::
+
+    evt_max: 100
+    skip_events: 2
+    ntuple_file: 'ExampleTuple.root'
+    histo_file: 'ExampleHistos.root'
+
+You can add the following options to your command line in order to let DaVinci import the input files and all the option values reported in the database and in the job option file:
+::
+
+    ./run davinci --inputfiledb fileKey ./newDB.yaml --joboptfile ./newOpts.yaml
+
+
+Passing algorithms defined by the user
+--------------------------------------
+
+The algorithms defined by the user can be implemented in a .py file and passed to DaVinci via the ``user_algorithms`` property using the following notation: ``./path/to/userfile:main``,
+where the ``:`` symbol separates the path to the file ``./path/to/userfile.py`` from the name of the function ``main`` that has to be called.
+The ``user_algorithms`` property can be passed to DaVinci either in the ``--joboptfile`` argument or via the command line, exploiting the extra ``ctx_args`` arguments.
+In this way DaVinci will call the ``setup_user_algorithms`` function, implemented `here <https://gitlab.cern.ch/lhcb/DaVinci/-/blob/master/Phys/DaVinci/python/DaVinci/algorithms.py>`__,
+adding to the configuration all the algorithms and tools defined by the user in the ``main`` function.
+
+The ``main`` function, defined by the user, has to return two objects: a list of algorithms and a list of public tools that have to be used in the job.
+In case the user needs to create multiple tuples with a different set of algorithms, the list of algorithms can
+be replaced with a dictionary, where the key is the name of the tuple node and the element is the related list of algorithms.
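+A minimal sketch of this dictionary form (the dielectron names are hypothetical; the dimuon objects are those defined in the example below):
+::
+
+    def main():
+        algs = {
+            "DimuonsTuple": upfront_reconstruction() + [dimuons, tuple_dimuons],
+            "DielectronsTuple": upfront_reconstruction() + [dielectrons, tuple_dielectrons],
+        }
+        return algs, []
+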
+Here is an example of a valid user algorithm file:
+::
+
+    import Functors as F
+    from DaVinci.standard_particles import make_detached_mumu
+    from DaVinci.reco_objects import upfront_reconstruction_from_file as upfront_reconstruction
+    from FunTuple import FunctorCollection, FunTuple_Particles as Funtuple
+
+    # Prepare the node with the selection
+    dimuons = make_detached_mumu()
+
+    #FunTuple: Jpsi info
+    branches = {}
+    branches['Jpsi'] = 'J/psi(1S) -> mu+ mu-'
+
+    variables_jpsi = FunctorCollection({
+        'THOR_PT': F.PT,
+        'THOR_FourMom_P': F.FOURMOMENTUM
+    })
+
+    #associate FunctorCollection to branch name
+    variables = {}
+    variables['Jpsi'] = variables_jpsi
+
+    #Configure Funtuple algorithm
+    tuple_dimuons = Funtuple(
+        name="DimuonsTuple",
+        tree_name="DecayTree",
+        branches=branches,
+        variables=variables,
+        inputs=dimuons)
+
+
+    def main():
+        algs = upfront_reconstruction() + [dimuons, tuple_dimuons]
+        return algs, []
+
+
+A few more complete examples can be found in
+`this directory <https://gitlab.cern.ch/lhcb/DaVinci/-/tree/master/DaVinciExamples/python/DaVinciExamples/tupling/>`__.
+
+In order to include this algorithm in the DaVinci job you can add the following option to the command line:
+::
+
+    ./run davinci --inputfiledb fileKey ./newDB.yaml --joboptfile ./newOpts.yaml --user_algorithms ./myAlgs:main
+
+In case the database, the job option file, or the user algorithm file is saved in a different location, you can give the full path on the command line.
+You can also define the ``user_algorithms`` property in ``./newOpts.yaml`` like any other DaVinci property instead of passing the information via the command line,
+by adding the following line to the .yaml file:
+::
+
+    user_algorithms: ./myAlgs:main
+
+
+Finally, another useful feature is that you can append any DaVinci job option to the command line to update its value at run time:
+::
+
+    ./run davinci --inputfiledb fileKey ./newDB.yaml --joboptfile ./newOpts.yaml --user_algorithms ./myAlgs:main --evt_max 20
+
+for running the job over 20 events instead of the 100 defined in the job option file.
+
+
+How to run DaVinci with gaudirun.py
+-----------------------------------
+
+The DaVinci application can also be run using the gaudirun.py script:
+::
+
+    ./run gaudirun.py myAlgs.py
+
+This second syntax was implemented in order to be able to run DaVinci in production by means of ProdConf, but users can also adopt it if they prefer it to the Click-based one.
+In this case ``myAlgs.py`` is the script containing the user algorithms and tools that have to be used in the job.
+Since ``--joboptfile`` cannot be passed on the command line, all the DaVinci options have to be set in ``myAlgs.py``, importing
+the ``DaVinci.options`` object.
+The DaVinci application can be run by calling the ``DaVinci.ConfigurationUpgrade.run_davinci_app()`` method at the end of the file.
+This method takes as input the arguments listed :ref:`here <DaVinciApp_conf>`; in this case the two important ones are the fileDB and the key identifying the input files to be used in the job.
+In case DaVinci has to be run in production, the corresponding ``prod_conf`` flag also needs to be enabled to allow a correct propagation of the job options from ProdConf to DaVinci.
+Taking as a starting point the example used in the previous section, the updated ``myAlgs.py`` file for running DaVinci with gaudirun.py will look like:
+::
+
+    import Functors as F
+    from DaVinci.standard_particles import make_detached_mumu
+    from DaVinci.reco_objects import upfront_reconstruction_from_file as upfront_reconstruction
+    from FunTuple import FunctorCollection, FunTuple_Particles as Funtuple
+
+    # Prepare the node with the selection
+    dimuons = make_detached_mumu()
+
+    #FunTuple: Jpsi info
+    branches = {}
+    branches['Jpsi'] = 'J/psi(1S) -> mu+ mu-'
+
+    variables_jpsi = FunctorCollection({
+        'THOR_PT': F.PT,
+        'THOR_FourMom_P': F.FOURMOMENTUM
+    })
+
+    #associate FunctorCollection to branch name
+    variables = {}
+    variables['Jpsi'] = variables_jpsi
+
+    #Configure Funtuple algorithm
+    tuple_dimuons = Funtuple(
+        name="DimuonsTuple",
+        tree_name="DecayTree",
+        branches=branches,
+        variables=variables,
+        inputs=dimuons)
+
+
+    def main():
+        algs = upfront_reconstruction() + [dimuons, tuple_dimuons]
+        return algs, []
+
+
+    from DaVinci import options
+    options.enable_unpack = False
+    options.evt_max = 10
+    options.histo_file = 'DV-example-his.root'
+    options.ntuple_file = 'DV-example-ntp.root'
+    options.user_algorithms = "./myAlgs:main"
+    options.write_fsr = False
+
+    from DaVinci.ConfigurationUpgrade import run_davinci_app
+    fileDB_key = "Upgrade_Bd2KstarMuMu_ldst"
+    fileDB_path = "Phys/DaVinci/options/DaVinciDB-Example.yaml"
+    run_davinci_app(fileDB_key, fileDB_path, prod_conf=False)
+
+
+Another example for running DaVinci with gaudirun.py can be found `here <https://gitlab.cern.ch/lhcb/DaVinci/-/blob/master/DaVinciExamples/python/DaVinciExamples/tupling/option_davinci_tupling_from_hlt2_gaudirun.py>`__.
+
+How to add an event pre-filter
+------------------------------
+
+Event unpacking, FunTuple and many other algorithms run in a job can be very time consuming, so the processing of
+the input files can be much faster and more efficient if an event pre-filter is applied on top of the whole job chain,
+selecting only the events that satisfy a specific set of conditions.
+To add an event pre-filter to a job, one can set the value of the ``evt_pre_filters`` property with a dict
+containing an element for each filter you want to implement, where the key is the filter name and the value is a string
+containing the filter code. Then DaVinci will automatically create a ``LoKi__HDRFilter`` or
+``LoKi__VoidFilter`` instance, according to the filter code, for each dict entry.
+For example, the filter code can exploit the ``HLT_PASS`` feature to ensure that the trigger/sprucing line corresponding to the input location fired in that specific event:
+::
+
+    from DaVinci import options
+    options.evt_pre_filters = {"Hlt2TopoLineFilter": "HLT_PASS('Hlt2Topo2BodyLineDecision')"}
+
+A filter can also be instantiated using the ``add_filter`` function:
+::
+
+    from DaVinci.algorithms import add_filter
+    filter = add_filter("Hlt2TopoLineFilter", "HLT_PASS('Hlt2Topo2BodyLineDecision')")
+
+Additional examples can be found `here <https://gitlab.cern.ch/lhcb/DaVinci/-/blob/master/DaVinciTests/tests/options/option_davinci_filters.py>`__ and `here <https://gitlab.cern.ch/lhcb/DaVinci/-/blob/master/DaVinciExamples/python/DaVinciExamples/tupling/option_davinci_tupling_from_spruce.py>`__.
+Additional information on how to implement a filter code can be found `here <https://twiki.cern.ch/twiki/bin/view/LHCb/FAQ/DaVinciFAQ#How_to_process_the_stripped_DSTs>`__ and `here <https://gitlab.cern.ch/lhcb/Phys/blob/master/Phys/PhysConf/python/PhysConf/Filters.py>`__.
+
+Using the ConfiguredFuntuple wrapper
+------------------------------------
+
+The ``ConfiguredFuntuple`` wrapper has been implemented to make the configuration of a FunTuple algorithm instance in DaVinci easier, combining in a single step the instantiation of different objects:
+
+  #. ``LoKi__HDRFilter``: to select only the events passing a specific set of trigger lines,
+  #. ``make_data_with_FetchDataFromFile``: to obtain the correct DataHandle object needed by FunTuple,
+  #. ``Funtuple``: object containing all the branches and variables defined by the user.
+
+Thanks to this wrapper, the user can instantiate all three objects in the user file in the following way:
+::
+
+    def main():
+        from DaVinci.algorithms import ConfiguredFuntuple
+
+        config = {
+            "location": "/Event/Spruce/SpruceB2OC_BdToDsmPi_DsmToHHH_Line/Particles",
+            "filters": ["HLT_PASS('SpruceB2OC_BdToDsmPi_DsmToHHH_LineDecision')"],
+            "preamble": ['TRACK_MAX_PT = MAXTREE(ISBASIC & HASTRACK, PT, -1)'],
+            "tree": "DecayTree",
+            "branches": branches,
+            "variables": variables,
+        }
+
+        algs = ConfiguredFuntuple({"B0Dspi": config})
+        return algs, []
+
+``ConfiguredFuntuple`` takes as input a dictionary containing an entry for each FunTuple instance that has to be created,
+where the key is the FunTuple name and the value is a configuration dictionary with the following information:
+
+  - ``"location"``: string with the input location to be used as input to FunTuple,
+  - ``"filters"``: list of filters to be applied in front of FunTuple,
+  - ``"preamble"``: list of LoKi functors to simplify the code that is used to fill the FunTuple leaves,
+  - ``"tree"``: name of the FunTuple tree,
+  - ``"branches"``: dictionary with the FunTuple branches,
+  - ``"variables"``: dictionary with the FunTuple variables for each branch.
+
+``ConfiguredFuntuple(...)`` returns a dictionary containing the lists of all the algorithms needed to run all the defined FunTuple instances.
+This wrapper is meant to be used only for the simplest cases, where no other algorithms have to be added between the ``LoKi__HDRFilter`` and the FunTuple;
+however, it can still be used as a starting point for more complex wrappers.
+
+How to run a job on an XGEN file
+--------------------------------
+
+The DaVinci application can also be run over an XGEN (extended generator) file without any additional step.
+When the ``input_type`` property is set to XGEN, DaVinci adds only the MC unpacking algorithms to the configuration.
+The new ``FunTupleMC`` algorithm is used to create the tuple.
+A working example can be found `here <https://gitlab.cern.ch/lhcb/DaVinci/-/blob/master/DaVinciExamples/python/DaVinciExamples/tupling/option_davinci_tupling_from_xgen.py>`__.
\ No newline at end of file
diff --git a/doc/davinci/api_index.rst b/doc/davinci/api_index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..5f8548b961f2de1dca6590ed9ef01849d567f187
--- /dev/null
+++ b/doc/davinci/api_index.rst
@@ -0,0 +1,9 @@
+
+DaVinci
+=======
+
+.. toctree::
+
+    application
+
+
diff --git a/doc/davinci/application.rst b/doc/davinci/application.rst
new file mode 100644
index 0000000000000000000000000000000000000000..87938da36a498d409fffc038616bdcadfb5422c9
--- /dev/null
+++ b/doc/davinci/application.rst
@@ -0,0 +1,15 @@
+Application
+===========
+
+The ``DaVinci.options`` object holds an instance of
+`PyConf.application.ApplicationOptions`.
+
+.. autoclass:: PyConf.application.ApplicationOptions
+    :members:
+
+The other members of the ``DaVinci`` module are used for high-level application
+configuration. Most 'main' options files will call `DaVinci.run_davinci`.
+
+.. automodule:: DaVinci
+
+    .. autofunction:: DaVinci.run_davinci
diff --git a/doc/index.rst b/doc/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..3465be27776bc1f83b5c24214d661e144be2d657
--- /dev/null
+++ b/doc/index.rst
@@ -0,0 +1,45 @@
+Welcome to DaVinci's documentation!
+===================================
+
+DaVinci is the LHCb offline analysis application.
+It allows users to produce the tuples in which the relevant information
+on the reconstructed particles of the decay of interest is stored.
+Consider it as your way to access LHCb data!
+
+The main purpose of DaVinci is tupling.
+You can use it in `Analysis Productions <https://gitlab.cern.ch/lhcb-datapkg/AnalysisProductions>`__,
+for submitting your own productions with `Ganga <https://ganga.readthedocs.io/en/latest/>`__,
+or for running small jobs on cvmfs-enabled systems, like lxplus at CERN.
+
+Nevertheless, since it gives you access to more detailed information about
+the reconstructed particles, DaVinci also allows you to perform more detailed
+studies on the Sprucing or Turbo output.
+
+This site documents the various aspects of DaVinci, which is fundamentally a group
+of Python packages that configure algorithms, tools, data flow, and control
+flow in order to run a `Gaudi`_-based application that has full access to Sprucing
+and Turbo output.
+
+.. _Gaudi: https://gitlab.cern.ch/gaudi/Gaudi
+
+.. toctree::
+    :caption: User Guide
+    :maxdepth: 3
+
+    configuration/davinci_configuration
+
+    tutorials/running
+
+.. toctree::
+    :caption: API Reference
+    :maxdepth: 3
+
+    davinci/api_index
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
diff --git a/doc/make.bat b/doc/make.bat
new file mode 100644
index 0000000000000000000000000000000000000000..121ba49dc45b502721354e677a8c809ba013568b
--- /dev/null
+++ b/doc/make.bat
@@ -0,0 +1,45 @@
+REM ###########################################################################
+REM # (c) Copyright 2022 CERN for the benefit of the LHCb Collaboration      #
+REM #                                                                         #
+REM # This software is distributed under the terms of the GNU General Public #
+REM # Licence version 3 (GPL Version 3), copied verbatim in the file         #
+REM # "COPYING".                                                              #
+REM #                                                                         #
+REM # In applying this licence, CERN does not waive the privileges and       #
+REM # immunities granted to it by virtue of its status as an                 #
+REM # Intergovernmental Organization or submit itself to any jurisdiction.   #
+REM ###########################################################################
+@ECHO OFF
+
+pushd %~dp0
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+	set SPHINXBUILD=sphinx-build
+)
+set SOURCEDIR=.
+set BUILDDIR=_build
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% >NUL 2>NUL
+if errorlevel 9009 (
+	echo.
+	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+	echo.installed, then set the SPHINXBUILD environment variable to point
+	echo.to the full path of the 'sphinx-build' executable. Alternatively you
+	echo.may add the Sphinx directory to PATH.
+	echo.
+	echo.If you don't have Sphinx installed, grab it from
+	echo.http://sphinx-doc.org/
+	exit /b 1
+)
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+
+:end
+popd
diff --git a/doc/requirements.txt b/doc/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..031fb0a275ee90e7e21a5c76a20b7188203262eb
--- /dev/null
+++ b/doc/requirements.txt
@@ -0,0 +1,6 @@
+# FIXME sphinx >=3.4.0 crashes on ConfigurableUser classes (ApplicationOptions)
+# since it considers dict __slots__ as name: docstring and for us the value
+# means default value... https://github.com/sphinx-doc/sphinx/pull/8546
+sphinx==4.4.0
+sphinx_rtd_theme==1.0.0
+gitpython
\ No newline at end of file
diff --git a/doc/tutorials/running.rst b/doc/tutorials/running.rst
new file mode 100644
index 0000000000000000000000000000000000000000..e223ef58d7b0890b916c8e317a959e994603f6fa
--- /dev/null
+++ b/doc/tutorials/running.rst
@@ -0,0 +1,16 @@
+Running DaVinci
+===============
+
+Broadly speaking, DaVinci is a repository of Python files which can be used to
+configure a Gaudi application. In this way, "running DaVinci" is the same as
+running any other LHCb application (such as Brunel or Moore)::
+
+    lb-run DaVinci/latest gaudirun.py some_options_file.py
+
+
+Until `DaVinci/v53r0` the above was the only way to run DaVinci.
+With that release a new way of running DaVinci was introduced::
+
+    lb-run DaVinci/latest davinci [davinci_option] command [command_option] [extra_options]
+
+This command exploits a Click-based script that allows all the options to be set directly on the command line.
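+
+For example, reusing the placeholder database and option files from the configuration guide::
+
+    lb-run DaVinci/latest davinci --inputfiledb fileKey ./newDB.yaml --joboptfile ./newOpts.yaml --evt_max 100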