Commit 728147c4 authored by Aleksandra Mnich's avatar Aleksandra Mnich
Browse files

Merge branch 'dev' into 'master'

Version 1.6.0

See merge request !191
parents 34103671 79101686
Pipeline #4179031 passed with stages
in 15 minutes and 46 seconds
......@@ -14,7 +14,7 @@ stages:
- build
- pages
- deploy
- deploy_commons
- deploy_analysis
- notebooks_exec
doc:
......@@ -77,6 +77,7 @@ test_dev:
- apt-get -y update
- apt install -y openjdk-11-jdk
- pip install -r test-requirements.txt
- pip list
- pytest ${project_root}/${test_folder} --cov=${project_name} --junitxml=report.xml
- pytest --cov-report xml:coverage-reports/coverage-lhcsmapi.xml --cov=${project_name} ${project_root}/${test_folder}
......@@ -93,30 +94,18 @@ sonar:
script:
- bash CI/sonar.sh
deploy_production:
# Deploy package
stage: deploy
script:
- pip install twine
- python setup.py sdist
- twine upload --verbose --skip-existing dist/*
only:
- tags
deploy_production_eos:
stage: deploy
variables:
"EOS_PROJECT_PATH": "/eos/project/l/lhcsm"
"EOS_PUBLIC_PATH": "/eos/project/l/lhcsm/public/"
"SPARK3_EOS_VENV_PATH": "/eos/project/l/lhcsm/venv_${CI_COMMIT_REF_NAME}/nxcals_spark_3"
"SPARK2_EOS_VENV_PATH": "/eos/project/l/lhcsm/venv_${CI_COMMIT_REF_NAME}/nxcals_spark_2"
"COMMONS_EOS_VENV_PATH": "/eos/project/l/lhcsm/venv_${CI_COMMIT_REF_NAME}/commons"
"EOS_PATH": "/eos/project/l/lhcsm/venv_${CI_COMMIT_REF_NAME}/commons/lhcsmapi"
"VENV_PATH": "/eos/project/l/lhcsm/venv_${CI_COMMIT_REF_NAME}"
"EOS_PATH": "/eos/project/l/lhcsm/venv_${CI_COMMIT_REF_NAME}/lhcsmapi"
"CI_OUTPUT_DIR": "lhcsmapi"
"METHOD": "rsync"
"SPARK3_REQUIREMENTS_FILE": swan-nxcals-spark3-requirements.txt
"SPARK2_REQUIREMENTS_FILE": swan-nxcals-spark2-requirements.txt
"SCRIPT_FILENAME": "${CI_COMMIT_REF_NAME}.sh"
"SCRIPT_PATH": "/eos/project/l/lhcsm/public/packages_notebooks.sh"
"DEPLOY_PRO": "true"
# Only when pushed with tags
only:
......@@ -133,15 +122,13 @@ deploy_development_eos:
stage: deploy
variables:
"EOS_PUBLIC_PATH": "/eos/project/l/lhcsm/public/"
"SPARK3_EOS_VENV_PATH": "/eos/project/l/lhcsm/venv_${CI_COMMIT_BRANCH}/nxcals_spark_3"
"SPARK2_EOS_VENV_PATH": "/eos/project/l/lhcsm/venv_${CI_COMMIT_BRANCH}/nxcals_spark_2"
"COMMONS_EOS_VENV_PATH": "/eos/project/l/lhcsm/venv_${CI_COMMIT_BRANCH}/commons"
"EOS_PATH": "/eos/project/l/lhcsm/venv_${CI_COMMIT_BRANCH}/commons/lhcsmapi"
"VENV_PATH": "/eos/project/l/lhcsm/venv_${CI_COMMIT_BRANCH}"
"EOS_PATH": "/eos/project/l/lhcsm/venv_${CI_COMMIT_BRANCH}/lhcsmapi"
"CI_OUTPUT_DIR": "lhcsmapi"
"METHOD": "rsync"
"SPARK3_REQUIREMENTS_FILE": swan-nxcals-spark3-requirements.txt
"SPARK2_REQUIREMENTS_FILE": swan-nxcals-spark2-requirements.txt
"SCRIPT_FILENAME": "${CI_COMMIT_BRANCH}.sh"
"SCRIPT_PATH": "/eos/project/l/lhcsm/public/packages_notebooks_dev.sh"
# Only when pushed on a branch
only:
- branches
......@@ -158,6 +145,7 @@ deploy_development_eos:
delete_venv:
stage: deploy
image: gitlab-registry.cern.ch/ci-tools/ci-worker:cc7
variables:
GIT_STRATEGY: none
# EOS_MGM_URL: eosproject.cern.ch
......@@ -180,9 +168,9 @@ delete_venv:
dependencies: [] # this is needed because otherwise this job would fail when artifacts from previous job have expired
deploy_dev_lhcsmnb_eos:
stage: deploy_commons
stage: deploy
variables:
"EOS_PATH": "/eos/project/l/lhcsm/venv_${CI_COMMIT_BRANCH}/commons/lhcsmnb"
"EOS_PATH": "/eos/project/l/lhcsm/venv_${CI_COMMIT_BRANCH}/lhcsmnb"
"CI_OUTPUT_DIR": "lhc-sm-notebook/lhcsmnb"
"NB_BRANCH": "${CI_COMMIT_BRANCH}"
"METHOD": "rsync"
......@@ -196,9 +184,9 @@ deploy_dev_lhcsmnb_eos:
after_script: [ ]
deploy_pro_lhcsmnb_eos:
stage: deploy_commons
stage: deploy
variables:
"EOS_PATH": "/eos/project/l/lhcsm/venv_${CI_COMMIT_REF_NAME}/commons/lhcsmnb"
"EOS_PATH": "/eos/project/l/lhcsm/venv_${CI_COMMIT_REF_NAME}/lhcsmnb"
"CI_OUTPUT_DIR": "lhc-sm-notebook/lhcsmnb"
"NB_BRANCH": "master"
"METHOD": "rsync"
......@@ -211,6 +199,34 @@ deploy_pro_lhcsmnb_eos:
before_script: [ ]
after_script: [ ]
build_wheel:
stage: build
extends: .acc_py_build_wheel
only:
- tags
deploy_accpy:
stage: deploy
extends: .acc_py_release_wheel
dependencies:
- build_wheel
deploy_lhcsmanalysis_eos:
stage: deploy_analysis
variables:
"EOS_PUBLIC_PATH": "/eos/project/l/lhcsm/public/"
"VENV_PATH": "/eos/project/l/lhcsm/venv_${CI_COMMIT_BRANCH}"
"SPARK3_REQUIREMENTS_FILE": swan-nxcals-spark3-requirements.txt
"SCRIPT_FILENAME": "${CI_COMMIT_BRANCH}.sh"
"ANALYSIS_BRANCH": "${CI_COMMIT_BRANCH}"
only:
- branches
image: gitlab-registry.cern.ch/ci-tools/ci-web-deployer
script:
- bash CI/deploy_analysis.sh
before_script: [ ]
after_script: [ ]
notebooks_exec:
only:
- dev
......@@ -220,6 +236,7 @@ notebooks_exec:
variables:
API_BRANCH: $CI_COMMIT_BRANCH
NB_BRANCH: $CI_COMMIT_BRANCH
ANALYSIS_BRANCH: $CI_COMMIT_BRANCH
stage: notebooks_exec
trigger:
project: LHCData/lhc-sm-hwc
......
#!/bin/bash
# This script is triggered by CI to clone a chosen branch of the lhc-sm-analysis repositories and copy their contents to the venv on EOS.
# The lhc-sm-analysis group contains several repositories with sigmon analysis packages.
# Contents of each package are copied to be accessible under a path /eos/project/l/lhcsm/venv_${CI_COMMIT_BRANCH}/lhcsmapi/api/
# set ANALYSIS_BRANCH if not set
export ANALYSIS_BRANCH="${ANALYSIS_BRANCH:-$CI_COMMIT_BRANCH}"
yum install git -y
yum install jq -y
echo "${EOS_ACCOUNT_PASSWORD}" | kinit "${EOS_ACCOUNT_USERNAME}@CERN.CH"
REPOS=$(curl -s https://gitlab.cern.ch/api/v4/groups/152156 | jq -r ".projects[].http_url_to_repo")
for REPO in ${REPOS}; do
# extract the name
DIR=($(echo $REPO | cut -d/ -f6 | cut -d. -f1))
# check if branch exists
git ls-remote --exit-code --heads "${REPO}" "${ANALYSIS_BRANCH}"
if [ "$?" == "2" ]; then
echo "Branch '${ANALYSIS_BRANCH}' not found in ${DIR}, using 'dev' instead"
export ANALYSIS_BRANCH='dev'
fi;
git clone --single-branch --branch $ANALYSIS_BRANCH "${REPO}"
EOS_PATH=/eos/project/l/lhcsm/venv_${CI_COMMIT_BRANCH}/lhcsmapi/api/
rsync="/usr/bin/rsync"
if [ ! -x $rsync ]
then
echo ERROR: $rsync not found
exit 1
fi
# SSH will be used to connect to LXPLUS and there check if the EOS folder exists
ssh="/usr/bin/ssh"
if [ ! -x $ssh ]
then
echo ERROR: $ssh not found
exit 1
fi
# Copy contents to EOS
$rsync -abvuz -e "ssh -o StrictHostKeyChecking=no -o GSSAPIAuthentication=yes -o GSSAPITrustDNS=yes -o GSSAPIDelegateCredentials=yes" $DIR/lhcsmapi/api/analysis $EOS_ACCOUNT_USERNAME@lxplus.cern.ch:$EOS_PATH/
if [ $? -ne 0 ]
then
echo ERROR: Rsync to \"$EOS_PATH\" via lxplus.cern.ch, failed
exit 1
fi
done
# Environment scripts
These scripts are used by the experts when they create a new instance on SWAN.
`packages_notebooks.sh` is used to source the PRO env (`master` branch)
`packages_notebooks_dev.sh` is used to source the DEV env (`dev` branch)
They are deployed, together with the respective `lhcsmapi` version. `packages_notebooks.sh` is deployed on every tag on `master` and `packages_notebooks_dev.sh` is deployed on every push to `dev`.
PYTHON_VERSION=`python3 -c 'import sys; version=sys.version_info[:2]; print("{0}.{1}".format(*version))'`
export PYTHONPATH=/eos/project/l/lhcsm/venv:$PYTHONPATH
rm -r $CERNBOX_HOME/SWAN_projects/hwc
cp -r /eos/project/l/lhcsm/hwc/lhc-sm-hwc/ $CERNBOX_HOME/SWAN_projects/hwc/
ln -s /eos/project/m/mp3/ $CERNBOX_HOME/mp3
\ No newline at end of file
PYTHON_VERSION=`python3 -c 'import sys; version=sys.version_info[:2]; print("{0}.{1}".format(*version))'`
export PYTHONPATH=/eos/project/l/lhcsm/venv_dev:$PYTHONPATH
rm -r $CERNBOX_HOME/SWAN_projects/hwc_dev
cp -r /eos/project/l/lhcsm/hwc_dev/lhc-sm-hwc/ $CERNBOX_HOME/SWAN_projects/hwc_dev/
ln -s /eos/project/m/mp3/ $CERNBOX_HOME/mp3
\ No newline at end of file
......@@ -4,64 +4,48 @@
# Then it creates a script file $SCRIPT_FILENAME, which later can be executed on SWAN to setup the python path pointing to the dependencies.
# IF $DEPLOY_PRO is set, a link in ${EOS_PUBLIC_PATH}/${SCRIPT_FILENAME} is created, linking to the just deployed version.
# Second, it creates the folder on EOS and install all required dependencies which are not available by default on SWAN.
# Since numpy 1.21 and python 3.6 are not compatible we need to install requirements for 'Nxcals Spark 3' and 'Nxcals Spark 2'
# separately and setup python path accordingly to the software stack loaded by the user.
# Required dependencies are defined in $SPARK3_REQUIREMENTS_FILE and $SPARK2_REQUIREMENTS_FILE respectively.
# WARNING: Please note that the SWAN scripts used by the experts need to be changed manually in case of any changes
# in the folder structure.
# Required dependencies are defined in $SPARK3_REQUIREMENTS_FILE.
# Together with dev and pro we also deploy, respectively, packages_notebooks_dev.sh and packages_notebooks.sh. These are the scripts that the users
# source from SWAN to get the SIGMON dev or pro version.
SSH_OPTIONS="-o StrictHostKeyChecking=no -o GSSAPIAuthentication=yes -o GSSAPITrustDNS=yes -o GSSAPIDelegateCredentials=yes"
# This function creates the .sh file to source to load the venv.
# $1 is the spark3 dependencies directory inside the venv
# $2 is the spark2 dependencies directory inside the venv
# $3 is the commons dependencies directory inside the venv
# $4 is the script filename
function create_load_script () {
ssh ${SSH_OPTIONS} ${EOS_ACCOUNT_USERNAME}@lxplus <<EOF
echo "PYTHON_VERSION=\\\`python3 -c 'import sys; version=sys.version_info[:2]; print(\"{0}.{1}\".format(*version))'\\\`
if [[ \\\$PYTHON_VERSION == '3.8' ]] # for the Nxcals Spark 3
then
export PYTHONPATH=$1:\\\$PYTHONPATH
fi
if [[ \\\$PYTHON_VERSION == '3.6' ]] # for the Nxcals Spark 2
then
export PYTHONPATH=$2:\\\$PYTHONPATH
fi
export PYTHONPATH=$3:\\\$PYTHONPATH" > ${EOS_PUBLIC_PATH}/$4
# $1 is the venv directory
# $2 is the script filename
function create_load_script() {
ssh ${SSH_OPTIONS} ${EOS_ACCOUNT_USERNAME}@lxplus << EOF
echo "export PYTHONPATH=$1:\\\$PYTHONPATH" > ${EOS_PUBLIC_PATH}/$2
EOF
}
echo "${EOS_ACCOUNT_PASSWORD}" | kinit "${EOS_ACCOUNT_USERNAME}@CERN.CH"
### create script to load correct python path
create_load_script ${SPARK3_EOS_VENV_PATH} ${SPARK2_EOS_VENV_PATH} ${COMMONS_EOS_VENV_PATH} ${SCRIPT_FILENAME}
create_load_script ${VENV_PATH} ${SCRIPT_FILENAME}
echo "SWAN API environment path can be loaded using: ${EOS_PUBLIC_PATH}/${SCRIPT_FILENAME}"
### Install python requirements
ssh ${SSH_OPTIONS} ${EOS_ACCOUNT_USERNAME}@lxplus <<EOF
mkdir -p ${EOS_PATH}
mkdir -p ${SPARK2_EOS_VENV_PATH}
mkdir -p ${SPARK3_EOS_VENV_PATH}
ssh ${SSH_OPTIONS} ${EOS_ACCOUNT_USERNAME}@lxplus << EOF
mkdir -p ${EOS_PATH}
mkdir -p ${VENV_PATH}
EOF
if [[ -n ${DEPLOY_PRO} ]]; then
ssh ${SSH_OPTIONS} ${EOS_ACCOUNT_USERNAME}@lxplus <<EOF
ln -sfn ${EOS_PROJECT_PATH}/venv_${CI_COMMIT_REF_NAME} ${EOS_PROJECT_PATH}/venv
ssh ${SSH_OPTIONS} ${EOS_ACCOUNT_USERNAME}@lxplus << EOF
ln -sfn ${EOS_PROJECT_PATH}/venv_${CI_COMMIT_REF_NAME} ${EOS_PROJECT_PATH}/venv
EOF
scp ${SSH_OPTIONS} ./CI/env_scripts/packages_notebooks.sh ${EOS_ACCOUNT_USERNAME}@lxplus:${SCRIPT_PATH}
fi
### for Nxcals Spark 3
scp ${SSH_OPTIONS} ${SPARK3_REQUIREMENTS_FILE} ${EOS_ACCOUNT_USERNAME}@lxplus:${SPARK3_EOS_VENV_PATH}
ssh ${SSH_OPTIONS} ${EOS_ACCOUNT_USERNAME}@lxplus <<EOF
export PYTHONPATH=/cvmfs/projects.cern.ch/cryogenics/hepak/100_nxcals:/cvmfs/sft.cern.ch/lcg/views/LCG_100_nxcals/x86_64-centos7-gcc9-opt/python:/cvmfs/sft.cern.ch/lcg/views/LCG_100_nxcals/x86_64-centos7-gcc9-opt/lib:/cvmfs/sft.cern.ch/lcg/views/LCG_100_nxcals/x86_64-centos7-gcc9-opt/lib/python3.8/site-packages
/cvmfs/sft.cern.ch/lcg/views/LCG_100_nxcals/x86_64-centos7-gcc9-opt/bin/python3.8 -m pip install -r ${SPARK3_EOS_VENV_PATH}/$SPARK3_REQUIREMENTS_FILE --target=${SPARK3_EOS_VENV_PATH}
EOF
if [ "${CI_COMMIT_REF_NAME}" == "dev" ]; then
scp ${SSH_OPTIONS} ./CI/env_scripts/packages_notebooks_dev.sh ${EOS_ACCOUNT_USERNAME}@lxplus:${SCRIPT_PATH}
fi
### for Nxcals Spark 2
scp ${SSH_OPTIONS} ${SPARK2_REQUIREMENTS_FILE} ${EOS_ACCOUNT_USERNAME}@lxplus:${SPARK2_EOS_VENV_PATH}
ssh ${SSH_OPTIONS} ${EOS_ACCOUNT_USERNAME}@lxplus <<EOF
/eos/project/l/lhcsm/venv_dev/:/cvmfs/sft.cern.ch/lcg/views/LCG_95nxcalspython3_nxcals/x86_64-centos7-gcc7-opt/python:/cvmfs/sft.cern.ch/lcg/views/LCG_95nxcalspython3_nxcals/x86_64-centos7-gcc7-opt/lib:/cvmfs/sft.cern.ch/lcg/views/LCG_95nxcalspython3_nxcals/x86_64-centos7-gcc7-opt/lib/python3.6/site-packages
/cvmfs/sft.cern.ch/lcg/views/LCG_95nxcalspython3_nxcals/x86_64-centos7-gcc7-opt/bin/python3.6 -m pip install -r ${SPARK2_EOS_VENV_PATH}/$SPARK2_REQUIREMENTS_FILE --target=${SPARK2_EOS_VENV_PATH}
### for Nxcals Spark 3
scp ${SSH_OPTIONS} ${SPARK3_REQUIREMENTS_FILE} ${EOS_ACCOUNT_USERNAME}@lxplus:${VENV_PATH}
ssh ${SSH_OPTIONS} ${EOS_ACCOUNT_USERNAME}@lxplus << EOF
export PYTHONPATH=/cvmfs/projects.cern.ch/cryogenics/hepak/100_nxcals:/cvmfs/sft.cern.ch/lcg/views/LCG_100_nxcals/x86_64-centos7-gcc9-opt/python:/cvmfs/sft.cern.ch/lcg/views/LCG_100_nxcals/x86_64-centos7-gcc9-opt/lib:/cvmfs/sft.cern.ch/lcg/views/LCG_100_nxcals/x86_64-centos7-gcc9-opt/lib/python3.8/site-packages
/cvmfs/sft.cern.ch/lcg/views/LCG_100_nxcals/x86_64-centos7-gcc9-opt/bin/python3.8 -m pip install -r ${VENV_PATH}/$SPARK3_REQUIREMENTS_FILE --target=${VENV_PATH}
EOF
......@@ -5,104 +5,26 @@ This is a package with an API for signal access and processing for the LHC Signa
The API documentation is available at <https://sigmon.docs.cern.ch/api>
The User Guide is available at <https://sigmon-docs.web.cern.ch/>
The User Guide is available at <https://sigmon.docs.cern.ch/>
## Installation
There are two ways of using the API in your code:
1. Loading preinstalled packages from an EOS project folder (in SWAN environment)
2. Manual installation (in any environment)
The first option guarantees the use of the most recent code version without manual installation. The second one is more time consuming; however, it works in environments with no access to the EOS folder (e.g., the Apache Airflow scheduler). In addition, the second method allows installing a selected version (`pip install package_name==version`).
### Preinstalled Packages
To use the set of pre-installed packages please follow these three steps:
0. Contact the Signal Monitoring team (mailto:lhc-signal-monitoring@cern.ch) in order to get read access to the EOS folder with pre-installed packages.
1. (optional) Uninstall existing packages.
Historically, the initial way of installing the packages was by manual installation discussed in Section Manual Installation.
Thus, to avoid double reference to a package, please uninstall (with `pip uninstall package_name`) all packages needed for the API to work (tzlocal, tqdm, influxdb, plotly, lhcsmapi). This operation has to be done only once provided that the packages were installed (to check if a package was installed use `pip list | grep package_name` in SWAN Command Line Interface).
2. While logging to SWAN service, please add the environment script as `/eos/project/l/lhcsm/public/packages.sh`
<img src="https://gitlab.cern.ch/LHCData/lhc-sm-api/raw/master/figures/swan_environment_script.png" width=25%>
:warning: The project is currently under heavy refactoring and the documentation could be outdated. Please try the exemplary notebooks and don't hesitate to contact us at <lhc-signal-monitoring@cern.ch> if you have any doubts.
### Manual Installation
In order to use the API, it has to be installed with a python package installer as
## Installation
To install the `lhcsmapi` package with all the dependencies, you will need to setup a Python `venv` with access to the ACC-PY package repository.
```python
pip install --user lhcsmapi
```
Check the latest version at <a href="https://pypi.org/project/lhcsmapi/">https://pypi.org/project/lhcsmapi/</a>
The API relies on several external python packages which have to be installed in a similar manner. The list of packages is stored in the <u><i>requirements.txt</i></u> file.
If you use SWAN, the service provides a set of pre-installed python packages through CVMFS. The LHC-SM notebooks require installation of several additional packages on top of CVMFS. In order to install a package, please open a SWAN Terminal by clicking [>_] icon in the top right corner.
![SWAN CLI Button](https://gitlab.cern.ch/LHCData/lhc-sm-hwc/-/raw/master/figures/swan-cli-button.png)
Five additional python packages have to be installed:
- tzlocal - for time zone conversion
- tqdm - for progress bar to track queries
- influxdb - for communication with an Influxdb
- plotly - for interactive plotting of circuit schematics
- lhcsmapi - for LHC-SM API
In order to install a package please execute the following command
```
$ pip install --user package_name
```
The expected output, after installing all packages, is presented in five figures below.
- SWAN Terminal output after successful installation of tzlocal package.
![SWAN pip install tzlocal](https://gitlab.cern.ch/LHCData/lhc-sm-hwc/-/raw/master/figures/swan-pip-install-tzlocal.png)
- SWAN Terminal output after successful installation of tqdm package.
![SWAN pip install tqdm](https://gitlab.cern.ch/LHCData/lhc-sm-hwc/-/raw/master/figures/swan-pip-install-tqdm.png)
- SWAN Terminal output after successful installation of influxdb package.
![SWAN pip install influxdb](https://gitlab.cern.ch/LHCData/lhc-sm-hwc/-/raw/master/figures/swan-pip-install-influxdb.png)
- SWAN Terminal output after successful installation of plotly package.
![SWAN pip install plotly](https://gitlab.cern.ch/LHCData/lhc-sm-hwc/-/raw/master/figures/swan-pip-install-plotly.png)
- SWAN Terminal output after successful installation of lhcsmapi package.
![SWAN pip install lhcsmapi](https://gitlab.cern.ch/LHCData/lhc-sm-hwc/-/raw/master/figures/swan-pip-install-lhcsmapi.png)
### Updating lhcsmapi Package
Please note that the first four packages (tzlocal, tqdm, influxdb, plotly) have to be installed only once while the last one is still in the development phase and subject to frequent updates. Please send us an e-mail request (mailto:lhc-signal-monitoring@cern.ch) if you want to subscribe for updates. In order to update the lhcsmapi package, please execute the following command.
```
$ pip install --user --upgrade lhcsmapi
python -m venv ./my_venv
source ./my_venv/bin/activate
python -m pip install git+https://gitlab.cern.ch/acc-co/devops/python/acc-py-pip-config.git
python -m pip install lhcsmapi
```
For the specific versions of our dependencies, you can consult the `test-requirements.txt`.
### Known Issues
At times, in order to update the lhcsmapi package one has to execute the command
```
pip install --user --upgrade lhcsmapi
```
twice while using the SWAN terminal (cf. an error message in the figure below).
<img src="https://gitlab.cern.ch/LHCData/lhc-sm-api/raw/master/figures/double_reinstallation_error.png">
In case this command returns an error, please try to execute it again. Should that operation also fail, please uninstall the package by executing
```
$ pip uninstall lhcsmapi
```
and performing a fresh installation of the package
```
$ pip install --user lhcsmapi
```
:pencil: The library is available on EOS in the form of preinstalled packages so it can be loaded e.g. on SWAN. Please consult the `lhc-sm-hwc` README for the instructions.
Should you experience any further issues with installing a package, please contact <a href="https://swan.web.cern.ch">SWAN support</a> or use the preinstalled package with the environment script.
### NXCALS Access with SWAN
## NXCALS Access
The API allows to perform a query of signals from PM and NXCALS. The NXCALS database requires an assignment of dedicated access rights for a user.
If you want to query NXCALS with the API, please follow a procedure below on how to request the NXCALS access.
If you want to query NXCALS with the API, please request the access following the procedure described in http://nxcals-docs.web.cern.ch/current/user-guide/data-access/nxcals-access-request/. You will need WinCCOA and CMW systems in the PRO environment.
1. Go to http://nxcals-docs.web.cern.ch/current/user-guide/data-access/nxcals-access-request/ for most updated procedure
2. Send an e-mail to mailto:acc-logging-support@cern.ch with the following pieces of information:
- your NICE username
- system: WinCCOA, CMW
- NXCALS environment: PRO
Optionally one can mention that the NXCALS database will be accessed through SWAN.
Once the access is granted, you can use NXCALS with SWAN.
\ No newline at end of file
Once the access is granted, you can query NXCALS. Please note that you need an active Kerberos ticket.
RELEASE NOTES
=============
Version: 1.6.0
- queries that use `ParametersResolver` implemented: [SIGMON-316](https://its.cern.ch/jira/browse/SIGMON-316)
- removed support for Nxcals Spark 2 on SWAN [SIGMON-283](https://its.cern.ch/jira/browse/SIGMON-283)
- tests for the QueryParams domain objects [SIGMON-319](https://its.cern.ch/jira/browse/SIGMON-319)
- do not assert sigmon version in a test [SIGMON-317](https://its.cern.ch/jira/browse/SIGMON-317)
- `SignalTransformationBuilder` removed [SIGMON-291](https://its.cern.ch/jira/browse/SIGMON-291)
- `ResistanceBuilder` removed [SIGMON-290](https://its.cern.ch/jira/browse/SIGMON-290)
- tutorial notebook that presents the new 'signal analysis' API written [SIGMON-321](https://its.cern.ch/jira/browse/SIGMON-321)
- `FeatureQuery` removed [SIGMON-295](https://its.cern.ch/jira/browse/SIGMON-295)
- query API exemplary notebook written [SIGMON-320](https://its.cern.ch/jira/browse/SIGMON-320)
- support for CRYO system added [SIGMON-336](https://its.cern.ch/jira/browse/SIGMON-336)
- `AnalysisManager` created [SIGMON-329](https://its.cern.ch/jira/browse/SIGMON-329)
- new QHDA references [SIGMON-339](https://its.cern.ch/jira/browse/SIGMON-339)
- differences between query API documented [SIGMON-341](https://its.cern.ch/jira/browse/SIGMON-341)
- IPQ search tool fix [SIGMON-351](https://its.cern.ch/jira/browse/SIGMON-351)
- plateau detection algorithm improved [SIGMON-322](https://its.cern.ch/jira/browse/SIGMON-322)
- backwards compatible `QueryBuilder` written [SIGMON-299](https://its.cern.ch/jira/browse/SIGMON-299)
- extract a separate `lhcsmapi-qh` package [SIGMON-326](https://its.cern.ch/jira/browse/SIGMON-326)
- installation manual updated [SIGMON-359](https://its.cern.ch/jira/browse/SIGMON-359)
- bugfix for comparisons [SIGMON-337](https://its.cern.ch/jira/browse/SIGMON-337)
- analysis output classes added [SIGMON-355](https://its.cern.ch/jira/browse/SIGMON-355)
- test coverage increased [SIGMON-261](https://its.cern.ch/jira/browse/SIGMON-261)
- AN_RB_FPA corrections: events of class `ext` filtered out, only corresponding subsector queried for events, plots fixed [SIGMON-352](https://its.cern.ch/jira/browse/SIGMON-352)
- 100 first points removed from signal used for a quench detection [SIGMON-335](https://its.cern.ch/jira/browse/SIGMON-335)
- PNO.b2 / FPA notebooks won't fail on empty new QPS [SIGMON-318](https://its.cern.ch/jira/browse/SIGMON-318)
- deployment to EOS of the analysis packages done [SIGMON-364](https://its.cern.ch/jira/browse/SIGMON-364)
- a searching box in RCBXHV and RCDO FPA notebooks fixed
- deploy `lhcsmapi` to AccPy (instead of PyPi) [SIGMON-325](https://its.cern.ch/jira/browse/SIGMON-325)
- deployment to EOS of the `lhc-sm-analysis` fixed [SIGMON-369](https://its.cern.ch/jira/browse/SIGMON-369)
- `lhcsmapi.gui.pc` refactored [SIGMON-356](https://its.cern.ch/jira/browse/SIGMON-356)
- `lhcsmapi` deployed to AccPy (instead of PyPi) [SIGMON-325](https://its.cern.ch/jira/browse/SIGMON-325)
Version: 1.5.21
- `cern` prefix removed from the nxcals imports: [SIGMON-307](https://its.cern.ch/jira/browse/SIGMON-307)
- separate class to resolve signal metadata: [SIGMON-297](https://its.cern.ch/jira/browse/SIGMON-297)
......
......@@ -11,17 +11,6 @@ There are two ways of using the API in your code:
The first option guarantees the use of the most recent code version without manual installation. The second one is more time consuming; however, it works in environments with no access to the EOS folder (e.g., the Apache Airflow scheduler). In addition, the second method allows installing a selected version (`pip install package_name==version`).
### Preinstalled Packages
To use the set of pre-installed packages please follow these three steps:
0. Contact the Signal Monitoring team (mailto:lhc-signal-monitoring@cern.ch) in order to get read access to the EOS folder with pre-installed packages.
1. (optional) Uninstall existing packages.
Historically, the initial way of installing the packages was by manual installation discussed in Section Manual Installation.
Thus, to avoid double reference to a package, please uninstall (with `pip uninstall package_name`) all packages needed for the API to work (tzlocal, tqdm, influxdb, plotly, lhcsmapi). This operation has to be done only once provided that the packages were installed (to check if a package was installed use `pip list | grep package_name` in SWAN Command Line Interface).
2. While logging to SWAN service, please add the environment script as `/eos/project/l/lhcsm/public/packages.sh`
<img src="https://gitlab.cern.ch/LHCData/lhc-sm-api/raw/master/figures/swan_environment_script.png" width=25%>
### Manual Installation
In order to use the API, it has to be installed with a python package installer as
......
{
"cells": [
{
"cell_type": "markdown",
"id": "548d6435",
"metadata": {},
"source": [
"# Analysis functions examples"
]
},
{
"cell_type": "markdown",
"id": "ddeb6ccf",
"metadata": {},
"source": [
"This notebook shows some examples on how to use SIGMON analysis layer.\n",
"It is divided into two sections: the first one shows how to use the `calculate_features` function; the second one shows the usage of the `SignalProcessing` class."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3a3ca6bf",
"metadata": {},
"outputs": [],
"source": [
"import datetime\n",
"\n",
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"import pandas as pd\n",
"\n",
"from lhcsmapi.signal import features as signal_features\n",
"from lhcsmapi.signal import functions as signal_features_functions\n",
"import lhcsmapi.api.processing"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c14b8814",
"metadata": {},
"outputs": [],
"source": [
"%matplotlib inline"
]
},
{
"cell_type": "markdown",
"id": "573889c2",
"metadata": {},
"source": [
"# Load signals"
]
},
{
"cell_type": "markdown",
"id": "6f653f09",
"metadata": {},
"source": [
"We're loading 2 signals to use for the examples. These are 2 current signals."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "095092b9",
"metadata": {},
"outputs": [],
"source": [
"i1_df = pd.read_csv('current1.csv', index_col=0)\n",
"i2_df = pd.read_csv('current2.csv', index_col=0)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "176112bb",
"metadata": {},
"outputs": [],
"source": [
"plt.figure()\n",
"plt.plot(i1_df, label='current1')\n",
"plt.plot(i2_df, label='current2')\n",
"plt.legend()"
]
},
{
"cell_type": "markdown",
"id": "3372a3a4",
"metadata": {},
"source": [
"# lhcsmapi.signal"
]
},
{
"cell_type": "markdown",
"id": "0b4c9010",
"metadata": {},
"source": [
"This module is used to calculate features of a signal. It is assumed that each dataframe has only one column.\n",
"\n",
"It can accept single or multiple dataframes as input, as well as single or multiple features to calculate.\n",
"In general the result is a new dataframes with a single row, where the columns are `SIGNAL:feature`"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "37bb323d",
"metadata": {},
"outputs": [],
"source": [
"signal_features.calculate_features([i1_df, i2_df], np.mean)"
]
},
{
"cell_type": "markdown",
"id": "5d270994",
"metadata": {},
"source": [
"If a single dataframe and a single feature are passed as input, the result is a scalar."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "08ee7886",
"metadata": {},
"outputs": [],